gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.util.io;

import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.*;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.util.Locale;
import java.util.Set;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * File-system helpers for tests: symlinks, hard links, Windows junctions and subst drives,
 * temp files/directories, and throwaway .jar files.
 *
 * <p>Failure reporting convention: expected conditions are checked with JUnit assertions;
 * unexpected I/O errors are wrapped into {@link RuntimeException}s.
 */
public class IoTestUtil {
  private IoTestUtil() { }

  /** Returns the system temp directory, with Windows 8.3 short names ("~") expanded. */
  @NotNull
  public static File getTempDirectory() {
    File dir = new File(FileUtil.getTempDirectory());
    dir = expandWindowsPath(dir);
    return dir;
  }

  /** Canonicalizes paths containing Windows short-name tildes; best-effort (I/O errors ignored). */
  private static File expandWindowsPath(File file) {
    if (SystemInfo.isWindows && file.getPath().indexOf('~') > 0) {
      try {
        return file.getCanonicalFile();
      }
      catch (IOException ignored) { }
    }
    return file;
  }

  /** Creates a symlink {@code link} pointing at {@code target}; asserts the link resolves. */
  @NotNull
  public static File createSymLink(@NotNull String target, @NotNull String link) {
    return createSymLink(target, link, true);
  }

  /**
   * Creates a symlink {@code link} pointing at {@code target}.
   *
   * @param shouldExist expected result of {@code link.exists()} — pass {@code false}
   *                    when intentionally creating a dangling link.
   */
  @NotNull
  public static File createSymLink(@NotNull String target, @NotNull String link, boolean shouldExist) {
    File linkFile = getFullLinkPath(link);
    try {
      Files.createSymbolicLink(linkFile.toPath(), FileSystems.getDefault().getPath(target));
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    assertEquals("target=" + target + ", link=" + linkFile, shouldExist, linkFile.exists());
    return linkFile;
  }

  /** Creates a hard link {@code link} to {@code target}; asserts the link exists. */
  @NotNull
  public static File createHardLink(@NotNull String target, @NotNull String link) {
    File linkFile = getFullLinkPath(link);
    try {
      Files.createLink(linkFile.toPath(), FileSystems.getDefault().getPath(target));
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    assertTrue("target=" + target + ", link=" + linkFile, linkFile.exists());
    return linkFile;
  }

  /** Windows only: creates an NTFS directory junction via {@code mklink /J}. */
  @NotNull
  public static File createJunction(@NotNull String target, @NotNull String junction) {
    assertTrue(SystemInfo.isWindows);

    File targetFile = new File(target);
    assertTrue(targetFile.getPath(), targetFile.isDirectory());

    File junctionFile = getFullLinkPath(junction);
    runCommand("cmd", "/C", "mklink", "/J", junctionFile.getPath(), targetFile.getPath());
    assertTrue("target=" + targetFile + ", link=" + junctionFile, junctionFile.isDirectory());
    return junctionFile;
  }

  /** Windows only: removes a junction created by {@link #createJunction}. */
  public static void deleteJunction(@NotNull String junction) {
    assertTrue(SystemInfo.isWindows);
    assertTrue(new File(junction).delete());
  }

  /** Windows only: maps {@code target} to a free drive letter via {@code subst}; returns the new root. */
  @NotNull
  public static File createSubst(@NotNull String target) {
    assertTrue(SystemInfo.isWindows);

    File targetFile = new File(target);
    assertTrue(targetFile.getPath(), targetFile.isDirectory());

    String substRoot = getFirstFreeDriveLetter() + ":";
    runCommand("subst", substRoot, targetFile.getPath());

    File rootFile = new File(substRoot + "\\");
    assertTrue("target=" + targetFile + ", subst=" + rootFile, rootFile.isDirectory());
    return rootFile;
  }

  /** Windows only: releases a drive mapping created by {@link #createSubst}. */
  public static void deleteSubst(@NotNull String substRoot) {
    runCommand("subst", StringUtil.trimEnd(substRoot, "\\"), "/d");
  }

  // Starts at 'E' to skip letters commonly taken by system/optical drives.
  private static char getFirstFreeDriveLetter() {
    Set<Character> roots =
      ContainerUtil.map2Set(File.listRoots(), root -> root.getPath().toUpperCase(Locale.US).charAt(0));
    for (char c = 'E'; c <= 'Z'; c++) {
      if (!roots.contains(c)) {
        return c;
      }
    }
    throw new RuntimeException("No free roots");
  }

  /**
   * Resolves {@code link} against the temp directory when relative, deletes any stale
   * file at that path, and ensures the parent directory exists.
   */
  private static File getFullLinkPath(String link) {
    File linkFile = new File(link);
    if (!linkFile.isAbsolute()) {
      linkFile = new File(getTempDirectory(), link);
    }
    assertTrue(link, !linkFile.exists() || linkFile.delete());
    File parentDir = linkFile.getParentFile();
    assertTrue("link=" + link + ", parent=" + parentDir,
               parentDir != null && (parentDir.isDirectory() || parentDir.mkdirs()));
    return linkFile;
  }

  /**
   * Runs an external command with stderr merged into stdout.
   * Throws a {@link RuntimeException} containing the captured output on a non-zero exit code.
   */
  private static void runCommand(String... command) {
    try {
      ProcessBuilder builder = new ProcessBuilder(command);
      builder.redirectErrorStream(true);
      Process process = builder.start();

      StringBuilder output = new StringBuilder();
      Thread thread = new Thread(() -> {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
          String line;
          while ((line = reader.readLine()) != null) {
            output.append(line).append('\n');
          }
        }
        catch (IOException e) {
          // Previously this rethrew a RuntimeException inside the pump thread, where it was
          // silently lost; record the failure in the captured output instead so it shows up
          // in the error report below.
          output.append("<error reading process output: ").append(e).append(">\n");
        }
      }, "io test");
      thread.start();

      int ret = process.waitFor();
      thread.join();

      if (ret != 0) {
        throw new RuntimeException(builder.command() + "\nresult: " + ret + "\noutput:\n" + output);
      }
    }
    catch (InterruptedException e) {
      Thread.currentThread().interrupt();  // restore interrupt status before converting to unchecked
      throw new RuntimeException(e);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Asserts two timestamps are equal after truncation to whole seconds (FS timestamp granularity). */
  public static void assertTimestampsEqual(long expected, long actual) {
    long roundedExpected = (expected / 1000) * 1000;
    long roundedActual = (actual / 1000) * 1000;
    assertEquals("expected: " + expected + ", actual: " + actual, roundedExpected, roundedActual);
  }

  /** Asserts two timestamps differ even after truncation to whole seconds. */
  public static void assertTimestampsNotEqual(long expected, long actual) {
    long roundedExpected = (expected / 1000) * 1000;
    long roundedActual = (actual / 1000) * 1000;
    assertTrue("(un)expected: " + expected + ", actual: " + actual, roundedExpected != roundedActual);
  }

  /** Creates a temp .jar containing only an empty manifest. */
  @NotNull
  public static File createTestJar() {
    try {
      File jarFile = expandWindowsPath(FileUtil.createTempFile("test.", ".jar"));
      return createTestJar(jarFile);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Writes an empty-manifest jar into {@code jarFile}. */
  @NotNull
  public static File createTestJar(@NotNull File jarFile) {
    return createTestJar(jarFile, JarFile.MANIFEST_NAME, "");
  }

  /**
   * Writes a jar into {@code jarFile} from alternating (entry name, UTF-8 content) pairs.
   */
  @NotNull
  public static File createTestJar(@NotNull File jarFile, @NotNull String... data) {
    try (ZipOutputStream stream = new ZipOutputStream(new FileOutputStream(jarFile))) {
      for (int i = 0; i < data.length; i += 2) {
        stream.putNextEntry(new ZipEntry(data[i]));
        stream.write(data[i + 1].getBytes(CharsetToolkit.UTF8_CHARSET));
        stream.closeEntry();
      }
      return jarFile;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Packs the regular files under {@code root} into {@code jarFile}, entry paths relative to {@code root}. */
  @NotNull
  public static File createTestJar(@NotNull File jarFile, @NotNull File root) {
    try (ZipOutputStream stream = new ZipOutputStream(new FileOutputStream(jarFile))) {
      FileUtil.visitFiles(root, file -> {
        if (file.isFile()) {
          String path = FileUtil.toSystemIndependentName(
            ObjectUtils.assertNotNull(FileUtil.getRelativePath(root, file)));
          try {
            stream.putNextEntry(new ZipEntry(path));
            stream.write(FileUtil.loadFileBytes(file));
            stream.closeEntry();
          }
          catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
        return true;
      });
      return jarFile;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Creates a directory named {@code name} under the temp directory. */
  @NotNull
  public static File createTestDir(@NotNull String name) {
    return createTestDir(getTempDirectory(), name);
  }

  /** Creates a directory named {@code name} under {@code parent}; asserts creation succeeded. */
  @NotNull
  public static File createTestDir(@NotNull File parent, @NotNull String name) {
    File dir = new File(parent, name);
    assertTrue(dir.getPath(), dir.mkdirs());
    return dir;
  }

  /** Creates an empty file named {@code name} under the temp directory. */
  @NotNull
  public static File createTestFile(@NotNull String name) {
    return createTestFile(name, null);
  }

  /** Creates a file named {@code name} under the temp directory with the given content. */
  @NotNull
  public static File createTestFile(@NotNull String name, @Nullable String content) {
    return createTestFile(getTempDirectory(), name, content);
  }

  /** Creates an empty file named {@code name} under {@code parent}. */
  @NotNull
  public static File createTestFile(@NotNull File parent, @NotNull String name) {
    return createTestFile(parent, name, null);
  }

  /**
   * Creates a file named {@code name} under {@code parent} (creating parent dirs as needed);
   * writes {@code content} when non-null. Asserts the file did not already exist.
   */
  @NotNull
  public static File createTestFile(@NotNull File parent, @NotNull String name, @Nullable String content) {
    try {
      assertTrue(parent.getPath(), parent.isDirectory() || parent.mkdirs());
      File file = new File(parent, name);
      assertTrue(file.getPath(), file.createNewFile());
      if (content != null) {
        FileUtil.writeToFile(file, content);
      }
      return file;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Recursively deletes each non-null argument; null entries are skipped. */
  public static void delete(File... files) {
    for (File file : files) {
      if (file != null) {
        FileUtil.delete(file);
      }
    }
  }

  /** Windows only: sets or clears the "hidden" attribute via {@code attrib}. */
  public static void setHidden(@NotNull String path, boolean hidden) {
    assertTrue(SystemInfo.isWindows);
    runCommand("attrib", hidden ? "+H" : "-H", path);
  }

  /** Overwrites {@code file} with {@code content}, wrapping I/O failures. */
  public static void updateFile(@NotNull File file, String content) {
    try {
      FileUtil.writeToFile(file, content);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
// ModalDialog.java
// See toplevel license.txt for copyright and license terms.

package util.swing;

import java.awt.Component;
import java.awt.Container;
import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.KeyEvent;
import java.util.EnumSet;
import java.util.Vector;

import javax.swing.Box;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JRootPane;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;

/** Base class with common functionality for modal dialogs. */
public class ModalDialog extends JDialog {
  // --------------- constants ---------------
  private static final long serialVersionUID = -5968176808231360009L;

  /** Size of outer margin in a dialog box. */
  public static final int OUTER_MARGIN = 11;

  /** Space between controls. */
  public static final int CONTROL_PADDING = 5;

  // -------------- public data --------------
  /** Initially false, this is set to true if the dialog is closed
    * by pressing the OK button. */
  public boolean okWasPressed;

  // ---------------- methods ----------------
  /** Create a new dialog.  'documentParent' is a Component that
    * originated the request; the top-level window that contains
    * it will be blocked from interaction until this dialog closes. */
  public ModalDialog(Component documentParent, String title)
  {
    // DOCUMENT_MODAL blocks only the owning window's hierarchy,
    // not every window of the application.
    super(documentParent!=null? SwingUtilities.getWindowAncestor(documentParent) : null,
          title,
          Dialog.ModalityType.DOCUMENT_MODAL);

    this.okWasPressed = false;
    this.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
    installEscapeCloseOperation(this);
  }

  /** Run the dialog, blocking until it is dismissed.  Returns true
    * if the user pressed OK, false if Cancel. */
  public boolean exec()
  {
    // This blocks until the dialog is dismissed.
    this.setVisible(true);

    return this.okWasPressed;
  }

  /** Print component sizes for debugging. */
  public static void printSizes(String label, Component c)
  {
    System.out.println(label+" preferred size: "+c.getPreferredSize());
    System.out.println(label+" max size: "+c.getMaximumSize());
    System.out.println(label+" min size: "+c.getMinimumSize());
    System.out.println(label+" cur size: "+c.getSize());
  }

  /** Create a new JButton with the specified label, mnemonic
    * (KeyEvent.VK_XXX code, or 0 for none), and action listener. */
  public static JButton makeButton(String label, int mnemonic, ActionListener listener)
  {
    JButton button = new JButton(label);
    button.setMnemonic(mnemonic);
    button.addActionListener(listener);
    return button;
  }

  /** Create a Cancel button and set its action to close the dialog. */
  public JButton makeCancelButton()
  {
    JButton cancelButton = new JButton("Cancel");
    cancelButton.addActionListener(new SwingUtil.WindowCloseAction(this));
    return cancelButton;
  }

  /** Create an OK button and set its action to close the dialog,
    * indicating that changes should be preserved. */
  public JButton makeOKButton()
  {
    JButton okButton = new JButton("OK");
    okButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        ModalDialog.this.okPressed();
      }
    });
    // Pressing Enter anywhere in the dialog activates OK.
    this.getRootPane().setDefaultButton(okButton);
    return okButton;
  }

  /** React to the OK button being pressed.  The base class
    * implementation remembers that it was pressed and closes the dialog.
    * Derived classes should copy data from controls into the object that
    * the dialog is meant to edit, then call super.okPressed().
    *
    * If some inputs need to be validated, do so before calling
    * super.okPressed(); and if validation fails, do not call it at
    * all, so the dialog will remain open. */
  public void okPressed()
  {
    this.okWasPressed = true;
    SwingUtil.closeWindow(this);
  }

  /** Make a vertical layout box. */
  public static Box makeVBox(Container parent)
  {
    Box b = Box.createVerticalBox();
    parent.add(b);
    return b;
  }

  /** Make a horizontal layout box. */
  public static Box makeHBox(Container parent)
  {
    Box b = Box.createHorizontalBox();
    parent.add(b);
    return b;
  }

  /** Make a vertical layout box with the given margin. */
  public static Box makeMarginVBox(Container parent, int margin)
  {
    // Nest hbox -> vbox -> vbox with struts on all four sides so the
    // returned box is inset by 'margin' on every edge.
    Box hb = ModalDialog.makeHBox(parent);
    hb.add(Box.createHorizontalStrut(margin));

    Box vb = ModalDialog.makeVBox(hb);
    vb.add(Box.createVerticalStrut(margin));

    Box ret = ModalDialog.makeVBox(vb);

    vb.add(Box.createVerticalStrut(margin));
    hb.add(Box.createHorizontalStrut(margin));

    return ret;
  }

  /** Create a line edit control and associated label. */
  public static JTextField makeLineEdit(Container parent, String label,
                                        char mnemonic, String initialValue)
  {
    Box hbox = ModalDialog.makeHBox(parent);

    JLabel labelControl = new JLabel(label+":");
    labelControl.setDisplayedMnemonic(mnemonic);
    hbox.add(labelControl);

    hbox.add(Box.createHorizontalStrut(CONTROL_PADDING));

    final JTextField ret = new JTextField(initialValue);
    hbox.add(ret);
    labelControl.setLabelFor(ret);

    // Arrange to select all the text when the box receives focus.
    // http://stackoverflow.com/questions/1178312/how-to-select-all-text-in-a-jformattedtextfield-when-it-gets-focus
    ret.addFocusListener(new FocusAdapter() {
      @Override
      public void focusGained(FocusEvent e) {
        // invokeLater: selecting immediately inside the focus event
        // would be undone by the text field's own caret handling.
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            ret.selectAll();
          }
        });
      }

      // This refinement removes focus when we leave.  The Swing
      // text controls draw the selected text with the selection
      // background even when the control does not have the focus,
      // which is different from how Qt does it and looks dumb
      // since tabbing from text control to text control then
      // works differently from tabbing from text control to some
      // other kind of control (like a dropdown or button).
      @Override
      public void focusLost(FocusEvent e) {
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            ret.select(0,0);
          }
        });
      }
    });

    disallowVertStretch(hbox);

    return ret;
  }

  /** Same as 'makeLineEdit', except also add a help button that will
    * pop up a help dialog on top of 'parentWindow' with 'helpText'. */
  public static JTextField makeLineEditWithHelp(
    Container parentBox, String label, char mnemonic, String initialValue,
    Component parentWindow, String helpText)
  {
    Box hb = ModalDialog.makeHBox(parentBox);

    JTextField textField =
      ModalDialog.makeLineEdit(hb, label, mnemonic, initialValue);

    hb.add(Box.createHorizontalStrut(ModalDialog.CONTROL_PADDING));
    hb.add(ModalDialog.makeHelpButton(parentWindow, label, helpText));

    return textField;
  }

  /** Set min/max height to preferred height in order to disallow
    * vertical stretching. */
  public static void disallowVertStretch(Component c)
  {
    Dimension pref = c.getPreferredSize();
    if (pref == null) {
      // Coverity analysis claims this might return null.  The
      // documentation is not clear.  I guess if it does return
      // null I'll just skip trying to disable vertical stretch.
      return;
    }
    Dimension max = c.getMaximumSize();
    Dimension min = c.getMinimumSize();
    max.height = pref.height;
    min.height = pref.height;
    c.setMaximumSize(max);
    c.setMinimumSize(min);
  }

  /** Arrange to close a dialog when Escape is pressed.
    *
    * Based on code from:
    * http://stackoverflow.com/questions/642925/swing-how-do-i-close-a-dialog-when-the-esc-key-is-pressed */
  public static void installEscapeCloseOperation(final JDialog dialog)
  {
    JRootPane rootPane = dialog.getRootPane();
    rootPane.registerKeyboardAction(
      new SwingUtil.WindowCloseAction(dialog),
      KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0),
      JComponent.WHEN_IN_FOCUSED_WINDOW);
  }

  /** Build a dropdown control for choosing among elements of an
    * enumeration.
    *
    * 'containingBox' is the Box into which we will put an hbox
    * to contain the combo box and its label.
    *
    * 'label' labels the combo box, and should end in a colon (":").
    * 'labelMnemonic' is its keyboard shortcut.
    *
    * 'elementType' is the enumeration type.
    *
    * 'initialValue' what to initially set the box to. */
  public static <E extends Enum<E>> JComboBox<E> makeEnumChooser(
    Box containingBox,
    String label,
    char labelMnemonic,
    Class<E> elementType,
    E initialValue)
  {
    // Put all enumerators into a vector.
    EnumSet<E> eSet = EnumSet.allOf(elementType);
    Vector<E> eVector = new Vector<E>();
    eVector.addAll(eSet);

    return makeVectorChooser(containingBox, label, labelMnemonic,
                             eVector, initialValue);
  }

  /** Build a dropdown control for choosing among elements of a
    * vector.
    *
    * 'containingBox' is the Box into which we will put an hbox
    * to contain the combo box and its label.
    *
    * 'label' labels the combo box, and should end in a colon (":").
    * 'labelMnemonic' is its keyboard shortcut.
    *
    * 'elements' is the vector of choices.
    *
    * 'initialValue' what to initially set the box to. */
  public static <E> JComboBox<E> makeVectorChooser(
    Box containingBox,
    String label,
    char labelMnemonic,
    Vector<E> elements,
    E initialValue)
  {
    Box hbox = ModalDialog.makeHBox(containingBox);

    JLabel lbl = new JLabel(label+":");
    lbl.setDisplayedMnemonic(labelMnemonic);
    hbox.add(lbl);

    hbox.add(Box.createHorizontalStrut(ModalDialog.CONTROL_PADDING));

    // Build the combo box.
    JComboBox<E> comboBox = new JComboBox<E>(elements);
    comboBox.setSelectedItem(initialValue);
    lbl.setLabelFor(comboBox);
    hbox.add(comboBox);

    ModalDialog.disallowVertStretch(hbox);

    return comboBox;
  }

  /** Create a button that, when pressed, shows a help dialog with
    * 'helpText' in it.  The help dialog caption will be
    * "Help: $labelText". */
  public static JButton makeHelpButton(
    final Component parentWindow,
    final String labelText,
    final String helpText)
  {
    // This label causes the help button to be a bit wider than
    // I would like, but I tried setting the size explicitly and
    // it did not work (was ignored).
    JButton helpButton = new JButton("?");
    helpButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        SwingUtil.informationMessageBox(parentWindow,
          "Help: "+labelText, helpText);
      }
    });
    return helpButton;
  }

  /** Create Cancel and OK buttons and add them to 'containingVBox'. */
  public void createCancelAndOkButtons(Box containingVBox)
  {
    Box btnBox = ModalDialog.makeHBox(containingVBox);

    // Buttons will be on the right side of the dialog.
    btnBox.add(Box.createHorizontalGlue());

    JButton cancelButton = this.makeCancelButton();
    btnBox.add(cancelButton);

    btnBox.add(Box.createHorizontalStrut(ModalDialog.CONTROL_PADDING));

    JButton okButton = this.makeOKButton();
    btnBox.add(okButton);

    ModalDialog.disallowVertStretch(btnBox);
  }

  /** Do the usual final actions to create the dialog: create the
    * Cancel and OK buttons, pack the dialog, and set the location
    * relative to the parent. */
  public void finishBuildingDialog(Box containingVBox)
  {
    this.createCancelAndOkButtons(containingVBox);
    this.pack();
    this.setLocationRelativeTo(this.getParent());
  }
}

// EOF
/*
 * Copyright (c) 2010-2013 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.web.component;

import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.extensions.markup.html.tabs.ITab;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.markup.html.list.Loop;
import org.apache.wicket.markup.html.list.LoopItem;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.util.lang.Args;
import org.jetbrains.annotations.Nullable;

import com.evolveum.midpoint.gui.api.GuiStyleConstants;
import com.evolveum.midpoint.gui.api.model.CountModelProvider;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;

import java.io.Serializable;
import java.util.List;

/**
 * A tabbed panel: renders a row of tab links (with optional per-tab count badges and an
 * optional extra component on the right side of the tab row) above the currently selected
 * tab's content panel.  The selected tab index is held in the component's default model.
 *
 * @author lazyman
 * @author Igor Vaynberg (ivaynberg)
 */
public class TabbedPanel<T extends ITab> extends Panel {

    /**
     * id used for child panels
     */
    public static final String TAB_PANEL_ID = "panel";

    public static final String RIGHT_SIDE_TAB_ITEM_ID = "rightSideTabItem";
    public static final String RIGHT_SIDE_TAB_ID = "rightSideTab";

    protected static final String ID_TITLE = "title";
    protected static final String ID_COUNT = "count";
    protected static final String ID_LINK = "link";

    private final IModel<List<T>> tabs;

    /**
     * the current tab
     */
    private int currentTab = -1;

    // Per-request cache of tab visibilities; rebuilt lazily, dropped on detach.
    private transient VisibilityCache visibilityCache;

    public TabbedPanel(final String id, final List<T> tabs) {
        this(id, tabs, null);
    }

    public TabbedPanel(final String id, final List<T> tabs, @Nullable RightSideItemProvider rightSideItemProvider) {
        this(id, tabs, null, rightSideItemProvider);
    }

    public TabbedPanel(final String id, final List<T> tabs, IModel<Integer> model, @Nullable RightSideItemProvider rightSideItemProvider) {
        // NOTE(review): raw Model with a cast — wraps the plain List in a model;
        // the list must be Serializable in practice.
        this(id, new Model((Serializable) tabs), model, rightSideItemProvider);
    }

    /**
     * Constructor
     *
     * @param id   component id
     * @param tabs list of ITab objects used to represent tabs
     */
    public TabbedPanel(final String id, final IModel<List<T>> tabs) {
        this(id, tabs, null, null);
    }

    /**
     * Constructor
     *
     * @param id    component id
     * @param tabs  list of ITab objects used to represent tabs
     * @param model model holding the index of the selected tab
     */
    public TabbedPanel(final String id, final IModel<List<T>> tabs, IModel<Integer> model, RightSideItemProvider rightSideItemProvider) {
        super(id, model);

        this.tabs = Args.notNull(tabs, "tabs");

        // Dynamic count so tabs added/removed from the model are picked up each render.
        final IModel<Integer> tabCount = new AbstractReadOnlyModel<Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public Integer getObject() {
                return tabs.getObject().size();
            }
        };

        WebMarkupContainer tabsContainer = newTabsContainer("tabs-container");
        add(tabsContainer);

        // add the loop used to generate tab names
        tabsContainer.add(new Loop("tabs", tabCount) {
            private static final long serialVersionUID = 1L;

            @Override
            protected void populateItem(final LoopItem item) {
                final int index = item.getIndex();
                final T tab = TabbedPanel.this.tabs.getObject().get(index);

                final WebMarkupContainer titleLink = newLink(ID_LINK, index);

                titleLink.add(newTitle(ID_TITLE, tab.getTitle(), index));
                item.add(titleLink);

                // Optional count badge; only shown when the tab supplies a count model.
                final IModel<String> countModel;
                if (tab instanceof CountModelProvider) {
                    countModel = ((CountModelProvider)tab).getCountModel();
                } else {
                    countModel = null;
                }
                Label countLabel = new Label(ID_COUNT, countModel);
                countLabel.setVisible(countModel != null);
                countLabel.add(AttributeModifier.append("class", new AbstractReadOnlyModel<String>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public String getObject() {
                        // Badge style: passive (grey) for zero/absent count, active otherwise.
                        if (countModel == null) {
                            return GuiStyleConstants.CLASS_BADGE_PASSIVE;
                        }
                        String count = countModel.getObject();
                        if ("0".equals(count)) {
                            return GuiStyleConstants.CLASS_BADGE_PASSIVE;
                        } else {
                            return GuiStyleConstants.CLASS_BADGE_ACTIVE;
                        }
                    }
                }));
                titleLink.add(countLabel);
            }

            @Override
            protected LoopItem newItem(final int iteration) {
                return newTabContainer(iteration);
            }
        });

        WebMarkupContainer rightSideTabItem = new WebMarkupContainer(RIGHT_SIDE_TAB_ITEM_ID);
        Component rightSideTabPanel = rightSideItemProvider != null ? rightSideItemProvider.createRightSideItem(RIGHT_SIDE_TAB_ID) : null;
        if (rightSideTabPanel != null) {
            rightSideTabItem.add(rightSideTabPanel);
        } else {
            rightSideTabItem.setVisible(false);
        }
        tabsContainer.add(rightSideTabItem);

        // Placeholder content panel; replaced by the selected tab's panel in setCurrentTab().
        add(newPanel());
    }

    /**
     * Override of the default initModel behaviour. This component <strong>will not</strong> use any
     * compound model of a parent.
     *
     * @see org.apache.wicket.Component#initModel()
     */
    @Override
    protected IModel<?> initModel() {
        return new Model<Integer>(-1);
    }

    /**
     * Generates the container for all tabs.  This override only marks the container with an
     * output markup id (no css class is applied here, despite {@link #getTabContainerCssClass()}).
     *
     * @param id container id
     * @return container
     */
    protected WebMarkupContainer newTabsContainer(final String id) {
        WebMarkupContainer tabs = new WebMarkupContainer(id);
        tabs.setOutputMarkupId(true);
        return tabs;
    }

    /**
     * Generates a loop item used to represent a specific tab's <code>li</code> element.
     *
     * @param tabIndex
     * @return new loop item
     */
    protected LoopItem newTabContainer(final int tabIndex) {
        return new LoopItem(tabIndex) {
            private static final long serialVersionUID = 1L;

            @Override
            protected void onConfigure() {
                super.onConfigure();

                setVisible(getVisiblityCache().isVisible(tabIndex));
            }

            @Override
            protected void onComponentTag(final ComponentTag tag) {
                super.onComponentTag(tag);

                // Append "tabN", plus the selected/last css classes where applicable.
                String cssClass = tag.getAttribute("class");
                if (cssClass == null) {
                    cssClass = " ";
                }
                cssClass += " tab" + getIndex();

                if (getIndex() == getSelectedTab()) {
                    cssClass += ' ' + getSelectedTabCssClass();
                }
                if (getVisiblityCache().getLastVisible() == getIndex()) {
                    cssClass += ' ' + getLastTabCssClass();
                }
                tag.put("class", cssClass.trim());
            }
        };
    }

    @Override
    protected void onBeforeRender() {
        int index = getSelectedTab();

        // If no tab is selected, or the selected one became invisible,
        // fall back to the first visible tab.
        if (index == -1 || getVisiblityCache().isVisible(index) == false) {
            // find first visible tab
            index = -1;
            for (int i = 0; i < tabs.getObject().size(); i++) {
                if (getVisiblityCache().isVisible(i)) {
                    index = i;
                    break;
                }
            }

            if (index != -1) {
                // found a visible tab, so select it
                setSelectedTab(index);
            }
        }

        setCurrentTab(index);

        super.onBeforeRender();
    }

    /**
     * @return the value of css class attribute that will be added to the last tab.
     * This implementation returns an empty string (no extra class).
     */
    protected String getLastTabCssClass() {
        return "";
    }

    /**
     * @return the value of css class attribute that will be added to a div containing the tabs. The
     * default value is <code>tab-row</code>
     */
    protected String getTabContainerCssClass() {
        return "tab-row";
    }

    /**
     * @return the value of css class attribute that will be added to the selected tab.
     * The default value is <code>active</code>
     */
    protected String getSelectedTabCssClass() {
        return "active";
    }

    /**
     * @return list of tabs that can be used by the user to add/remove/reorder tabs in the panel
     */
    public final IModel<List<T>> getTabs() {
        return tabs;
    }

    /**
     * Factory method for tab titles. Returned component can be anything that can attach to span
     * tags such as a fragment, panel, or a label
     *
     * @param titleId    id of title component
     * @param titleModel model containing tab title
     * @param index      index of tab
     * @return title component
     */
    protected Component newTitle(final String titleId, final IModel<?> titleModel, final int index) {
        Label label = new Label(titleId, titleModel);
        label.setRenderBodyOnly(true);
        return label;
    }

    /**
     * Factory method for links used to switch between tabs.
     * <p/>
     * The created component is attached to the following markup. Label component with id: title
     * will be added for you by the tabbed panel.
     * <p/>
     * <pre>
     * &lt;a href=&quot;#&quot; wicket:id=&quot;link&quot;&gt;&lt;span wicket:id=&quot;title&quot;&gt;[[tab title]]&lt;/span&gt;&lt;/a&gt;
     * </pre>
     * <p/>
     * Example implementation:
     * <p/>
     * <pre>
     * protected WebMarkupContainer newLink(String linkId, final int index)
     * {
     *     return new Link(linkId)
     *     {
     *         private static final long serialVersionUID = 1L;
     *
     *         public void onClick()
     *         {
     *             setSelectedTab(index);
     *         }
     *     };
     * }
     * </pre>
     *
     * @param linkId component id with which the link should be created
     * @param index  index of the tab that should be activated when this link is clicked. See
     *               {@link #setSelectedTab(int)}.
     * @return created link component
     */
    protected WebMarkupContainer newLink(final String linkId, final int index) {
        return new Link<Void>(linkId) {

            private static final long serialVersionUID = 1L;

            @Override
            public void onClick() {
                setSelectedTab(index);
                onTabChange(index);
            }
        };
    }

    /**
     * sets the selected tab
     *
     * @param index index of the tab to select
     * @return this for chaining
     * @throws IndexOutOfBoundsException if index is not in the range of available tabs
     */
    public TabbedPanel<T> setSelectedTab(final int index) {
        if ((index < 0) || (index >= tabs.getObject().size())) {
            throw new IndexOutOfBoundsException();
        }

        setDefaultModelObject(index);

        // force the tab's component to be acquired again if already the current tab
        currentTab = -1;
        setCurrentTab(index);

        return this;
    }

    private void setCurrentTab(int index) {
        if (this.currentTab == index) {
            // already current
            return;
        }
        this.currentTab = index;

        final Component component;

        if (currentTab == -1 || (tabs.getObject().size() == 0) || !getVisiblityCache().isVisible(currentTab)) {
            // no tabs or the current tab is not visible
            component = newPanel();
        } else {
            // show panel from selected tab
            T tab = tabs.getObject().get(currentTab);
            component = tab.getPanel(TAB_PANEL_ID);
            if (component == null) {
                throw new WicketRuntimeException("ITab.getPanel() returned null. TabbedPanel [" + getPath()
                        + "] ITab index [" + currentTab + "]");
            }
        }

        if (!component.getId().equals(TAB_PANEL_ID)) {
            throw new WicketRuntimeException(
                    "ITab.getPanel() returned a panel with invalid id [" + component.getId()
                            + "]. You must always return a panel with id equal to the provided panelId parameter. "
                            + "TabbedPanel [" + getPath() + "] ITab index [" + currentTab + "]");
        }

        addOrReplace(component);
    }

    private WebMarkupContainer newPanel() {
        return new WebMarkupContainer(TAB_PANEL_ID);
    }

    /**
     * @return index of the selected tab
     */
    public final int getSelectedTab() {
        return (Integer) getDefaultModelObject();
    }

    @Override
    protected void onDetach() {
        // Drop the visibility cache at end of request; tab visibility may change between requests.
        visibilityCache = null;

        super.onDetach();
    }

    private VisibilityCache getVisiblityCache() {
        if (visibilityCache == null) {
            visibilityCache = new VisibilityCache();
        }

        return visibilityCache;
    }

    /**
     * A cache for visibilities of {@link ITab}s.
     */
    private class VisibilityCache {

        /**
         * Visibility for each tab.
         */
        private Boolean[] visibilities;

        /**
         * Last visible tab.
         */
        private int lastVisible = -1;

        public VisibilityCache() {
            visibilities = new Boolean[tabs.getObject().size()];
        }

        public int getLastVisible() {
            if (lastVisible == -1) {
                for (int t = 0; t < tabs.getObject().size(); t++) {
                    if (isVisible(t)) {
                        lastVisible = t;
                    }
                }
            }

            return lastVisible;
        }

        public boolean isVisible(int index) {
            // Grow the cache if the tab list grew since construction.
            if (visibilities.length < index + 1) {
                Boolean[] resized = new Boolean[index + 1];
                System.arraycopy(visibilities, 0, resized, 0, visibilities.length);
                visibilities = resized;
            }

            if (visibilities.length > 0) {
                Boolean visible = visibilities[index];
                if (visible == null) {
                    List<T> tabsList = tabs.getObject();
                    T tab = tabsList == null || tabsList.size() == 0 ? null : tabs.getObject().get(index);
                    visible = tab != null && tab.isVisible();
                    if (tab != null) {
                        visibilities[index] = visible;
                    }
                }
                return visible;
            } else {
                return false;
            }
        }
    }

    /**
     * Method called after tab was changed - user clicked on link in tab header.
     *
     * @param index Index of new tab.
     */
    protected void onTabChange(int index) {}

    public interface RightSideItemProvider extends Serializable {
        Component createRightSideItem(String id);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.http.proxy; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.HashMap; import java.util.Map; import java.util.StringTokenizer; import org.apache.commons.lang3.CharUtils; import org.apache.jmeter.protocol.http.config.MultipartUrlConfig; import org.apache.jmeter.protocol.http.control.Header; import org.apache.jmeter.protocol.http.control.HeaderManager; import org.apache.jmeter.protocol.http.gui.HeaderPanel; import org.apache.jmeter.protocol.http.sampler.HTTPSamplerBase; import org.apache.jmeter.protocol.http.sampler.HTTPSamplerFactory; import org.apache.jmeter.protocol.http.util.ConversionUtils; import org.apache.jmeter.protocol.http.util.HTTPConstants; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; //For unit tests, @see TestHttpRequestHdr /** * The headers of the client HTTP request. 
* */ public class HttpRequestHdr { private static final Logger log = LoggingManager.getLoggerForClass(); private static final String HTTP = "http"; // $NON-NLS-1$ private static final String HTTPS = "https"; // $NON-NLS-1$ private static final String PROXY_CONNECTION = "proxy-connection"; // $NON-NLS-1$ public static final String CONTENT_TYPE = "content-type"; // $NON-NLS-1$ public static final String CONTENT_LENGTH = "content-length"; // $NON-NLS-1$ /** * Http Request method, uppercased, e.g. GET or POST. */ private String method = ""; // $NON-NLS-1$ /** CONNECT url. */ private String paramHttps = ""; // $NON-NLS-1$ /** * The requested url. The universal resource locator that hopefully uniquely * describes the object or service the client is requesting. */ private String url = ""; // $NON-NLS-1$ /** * Version of http being used. Such as HTTP/1.0. */ private String version = ""; // NOTREAD // $NON-NLS-1$ private byte[] rawPostData; private final Map<String, Header> headers = new HashMap<>(); private final String httpSamplerName; private HeaderManager headerManager; private String firstLine; // saved copy of first line for error reports public HttpRequestHdr() { this.httpSamplerName = ""; // $NON-NLS-1$ this.firstLine = "" ; // $NON-NLS-1$ } /** * @param httpSamplerName the http sampler name */ public HttpRequestHdr(String httpSamplerName) { this.httpSamplerName = httpSamplerName; } /** * Parses a http header from a stream. * * @param in * the stream to parse. * @return array of bytes from client. 
* @throws IOException when reading the input stream fails */ public byte[] parse(InputStream in) throws IOException { boolean inHeaders = true; int readLength = 0; int dataLength = 0; boolean firstLine = true; ByteArrayOutputStream clientRequest = new ByteArrayOutputStream(); ByteArrayOutputStream line = new ByteArrayOutputStream(); int x; while ((inHeaders || readLength < dataLength) && ((x = in.read()) != -1)) { line.write(x); clientRequest.write(x); if (firstLine && !CharUtils.isAscii((char) x)){// includes \n throw new IllegalArgumentException("Only ASCII supported in headers (perhaps SSL was used?)"); } if (inHeaders && (byte) x == (byte) '\n') { // $NON-NLS-1$ if (line.size() < 3) { inHeaders = false; firstLine = false; // cannot be first line either } final String reqLine = line.toString(); if (firstLine) { parseFirstLine(reqLine); firstLine = false; } else { // parse other header lines, looking for Content-Length final int contentLen = parseLine(reqLine); if (contentLen > 0) { dataLength = contentLen; // Save the last valid content length one } } if (log.isDebugEnabled()){ log.debug("Client Request Line: '" + reqLine.replaceFirst("\r\n$", "<CRLF>") + "'"); } line.reset(); } else if (!inHeaders) { readLength++; } } // Keep the raw post data rawPostData = line.toByteArray(); if (log.isDebugEnabled()){ log.debug("rawPostData in default JRE encoding: " + new String(rawPostData)); // TODO - charset? 
log.debug("Request: '" + clientRequest.toString().replaceAll("\r\n", "<CRLF>") + "'"); } return clientRequest.toByteArray(); } private void parseFirstLine(String firstLine) { this.firstLine = firstLine; if (log.isDebugEnabled()) { log.debug("browser request: " + firstLine.replaceFirst("\r\n$", "<CRLF>")); } StringTokenizer tz = new StringTokenizer(firstLine); method = getToken(tz).toUpperCase(java.util.Locale.ENGLISH); url = getToken(tz); version = getToken(tz); if (log.isDebugEnabled()) { log.debug("parsed method: " + method); log.debug("parsed url/host: " + url); // will be host:port for CONNECT log.debug("parsed version: " + version); } // SSL connection if (getMethod().startsWith(HTTPConstants.CONNECT)) { paramHttps = url; return; // Don't try to adjust the host name } /* The next line looks odd, but proxied HTTP requests look like: * GET http://www.apache.org/foundation/ HTTP/1.1 * i.e. url starts with "http:", not "/" * whereas HTTPS proxy requests look like: * CONNECT www.google.co.uk:443 HTTP/1.1 * followed by * GET /?gws_rd=cr HTTP/1.1 */ if (url.startsWith("/")) { // it must be a proxied HTTPS request url = HTTPS + "://" + paramHttps + url; // $NON-NLS-1$ } // JAVA Impl accepts URLs with unsafe characters so don't do anything if(HTTPSamplerFactory.IMPL_JAVA.equals(httpSamplerName)) { log.debug("First Line url: " + url); return; } try { // See Bug 54482 URI testCleanUri = new URI(url); if(log.isDebugEnabled()) { log.debug("Successfully built URI from url:"+url+" => " + testCleanUri.toString()); } } catch (URISyntaxException e) { log.warn("Url '" + url + "' contains unsafe characters, will escape it, message:"+e.getMessage()); try { String escapedUrl = ConversionUtils.escapeIllegalURLCharacters(url); if(log.isDebugEnabled()) { log.debug("Successfully escaped url:'"+url +"' to:'"+escapedUrl+"'"); } url = escapedUrl; } catch (Exception e1) { log.error("Error escaping URL:'"+url+"', message:"+e1.getMessage()); } } log.debug("First Line url: " + url); } /* * 
Split line into name/value pairs and store in headers if relevant * If name = "content-length", then return value as int, else return 0 */ private int parseLine(String nextLine) { int colon = nextLine.indexOf(':'); if (colon <= 0){ return 0; // Nothing to do } String name = nextLine.substring(0, colon).trim(); String value = nextLine.substring(colon+1).trim(); headers.put(name.toLowerCase(java.util.Locale.ENGLISH), new Header(name, value)); if (name.equalsIgnoreCase(CONTENT_LENGTH)) { return Integer.parseInt(value); } return 0; } private HeaderManager createHeaderManager() { HeaderManager manager = new HeaderManager(); for (Map.Entry<String, Header> entry : headers.entrySet()) { final String key = entry.getKey(); if (!key.equals(PROXY_CONNECTION) && !key.equals(CONTENT_LENGTH) && !key.equalsIgnoreCase(HTTPConstants.HEADER_CONNECTION)) { manager.add(entry.getValue()); } } manager.setName(JMeterUtils.getResString("header_manager_title")); // $NON-NLS-1$ manager.setProperty(TestElement.TEST_CLASS, HeaderManager.class.getName()); manager.setProperty(TestElement.GUI_CLASS, HeaderPanel.class.getName()); return manager; } public HeaderManager getHeaderManager() { if(headerManager == null) { headerManager = createHeaderManager(); } return headerManager; } public String getContentType() { Header contentTypeHeader = headers.get(CONTENT_TYPE); if (contentTypeHeader != null) { return contentTypeHeader.getValue(); } return null; } private boolean isMultipart(String contentType) { if (contentType != null && contentType.startsWith(HTTPConstants.MULTIPART_FORM_DATA)) { return true; } return false; } public MultipartUrlConfig getMultipartConfig(String contentType) { if(isMultipart(contentType)) { // Get the boundary string for the multiparts from the content type String boundaryString = contentType.substring(contentType.toLowerCase(java.util.Locale.ENGLISH).indexOf("boundary=") + "boundary=".length()); return new MultipartUrlConfig(boundaryString); } return null; } // // Parsing 
Methods // /** * Find the //server.name from an url. * * @return server's internet name */ public String serverName() { // chop to "server.name:x/thing" String str = url; int i = str.indexOf("//"); // $NON-NLS-1$ if (i > 0) { str = str.substring(i + 2); } // chop to server.name:xx i = str.indexOf('/'); // $NON-NLS-1$ if (0 < i) { str = str.substring(0, i); } // chop to server.name i = str.lastIndexOf(':'); // $NON-NLS-1$ if (0 < i) { str = str.substring(0, i); } // Handle IPv6 urls if(str.startsWith("[")&& str.endsWith("]")) { return str.substring(1, str.length()-1); } return str; } // TODO replace repeated substr() above and below with more efficient method. /** * Find the :PORT from http://server.ect:PORT/some/file.xxx * * @return server's port (or UNSPECIFIED if not found) */ public int serverPort() { String str = url; // chop to "server.name:x/thing" int i = str.indexOf("//"); if (i > 0) { str = str.substring(i + 2); } // chop to server.name:xx i = str.indexOf('/'); if (0 < i) { str = str.substring(0, i); } // chop to server.name i = str.lastIndexOf(':'); if (0 < i) { return Integer.parseInt(str.substring(i + 1).trim()); } return HTTPSamplerBase.UNSPECIFIED_PORT; } /** * Find the /some/file.xxxx from http://server.ect:PORT/some/file.xxx * * @return the path */ public String getPath() { String str = url; int i = str.indexOf("//"); if (i > 0) { str = str.substring(i + 2); } i = str.indexOf('/'); if (i < 0) { return ""; } return str.substring(i); } /** * Returns the url string extracted from the first line of the client request. * * @return the url */ public String getUrl(){ return url; } /** * Returns the method string extracted from the first line of the client request. * * @return the method (will always be upper case) */ public String getMethod(){ return method; } public String getFirstLine() { return firstLine; } /** * Returns the next token in a string. * * @param tk * String that is partially tokenized. 
* @return The remainder */ private String getToken(StringTokenizer tk) { if (tk.hasMoreTokens()) { return tk.nextToken(); } return "";// $NON-NLS-1$ } public String getUrlWithoutQuery(URL _url) { String fullUrl = _url.toString(); String urlWithoutQuery = fullUrl; String query = _url.getQuery(); if(query != null) { // Get rid of the query and the ? urlWithoutQuery = urlWithoutQuery.substring(0, urlWithoutQuery.length() - query.length() - 1); } return urlWithoutQuery; } /** * @return the httpSamplerName */ public String getHttpSamplerName() { return httpSamplerName; } /** * @return byte[] Raw post data */ public byte[] getRawPostData() { return rawPostData; } /** * @param sampler {@link HTTPSamplerBase} * @return String Protocol (http or https) */ public String getProtocol(HTTPSamplerBase sampler) { if (url.contains("//")) { String protocol = url.substring(0, url.indexOf(':')); if (log.isDebugEnabled()) { log.debug("Proxy: setting protocol to : " + protocol); } return protocol; } else if (sampler.getPort() == HTTPConstants.DEFAULT_HTTPS_PORT) { if (log.isDebugEnabled()) { log.debug("Proxy: setting protocol to https"); } return HTTPS; } else { if (log.isDebugEnabled()) { log.debug("Proxy setting default protocol to: http"); } return HTTP; } } }
/* * Copyright 2016 Agapsys Tecnologia Ltda-ME. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.agapsys.web.toolkit.services; import com.agapsys.web.toolkit.AbstractApplication; import com.agapsys.web.toolkit.services.LdapService.LdapException.LdapExceptionType; import com.agapsys.web.toolkit.Service; import java.util.Collections; import java.util.Hashtable; import java.util.LinkedList; import java.util.List; import javax.naming.AuthenticationException; import javax.naming.AuthenticationNotSupportedException; import javax.naming.CommunicationException; import javax.naming.Context; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.Attribute; import javax.naming.directory.Attributes; import javax.naming.directory.DirContext; import javax.naming.directory.InitialDirContext; import javax.naming.directory.SearchControls; import javax.naming.directory.SearchResult; public class LdapService extends Service { // <editor-fold desc="STATIC SCOPE"> // ========================================================================= public static class LdapException extends Exception { // CLASS SCOPE ========================================================= public static enum LdapExceptionType { INVALID_CREDENTIALS, AUTHENTICATION_NOT_SUPPORTED, COMMUNICATION_FAILURE, NAMING_ERROR } // ===================================================================== private final LdapExceptionType exceptionType; private 
LdapException(LdapExceptionType exceptionType, String message, Throwable cause) { super(message, cause); this.exceptionType = exceptionType; } private LdapException(LdapExceptionType exceptionType, Throwable cause) { super(cause); this.exceptionType = exceptionType; } public LdapExceptionType getExceptionType() { return exceptionType; } } public static class LdapAttribute { private final String name; private final List<String> values = new LinkedList<>(); private final List<String> unmodifiableValues = Collections.unmodifiableList(values); private LdapAttribute(Attribute attribute) throws NamingException { this.name = attribute.getID(); NamingEnumeration nem = attribute.getAll(); while(nem.hasMoreElements()) { Object valueObj = nem.next(); if (valueObj instanceof String) this.values.add(valueObj.toString()); } } public String getName() { return name; } public List<String> getValues() { return unmodifiableValues; } @Override public String toString() { return String.format("%s: %s", getName(), getValues().toString()); } } public static class LdapUser { private final String dn; private final List<LdapAttribute> attributes = new LinkedList<>(); private final List<LdapAttribute> unmodifiableAttributes = Collections.unmodifiableList(attributes); private LdapUser(String dn, Attributes coreAttributes) throws NamingException { this.dn = dn; NamingEnumeration<? 
extends Attribute> attrs = coreAttributes.getAll(); while(attrs.hasMoreElements()) { Attribute attr = attrs.next(); this.attributes.add(new LdapAttribute(attr)); } } public String getDn() { return dn; } public List<LdapAttribute> getAttributes() { return unmodifiableAttributes; } } private static final String PROPERTY_PREFIX = LdapService.class.getName(); public static final String KEY_LDAP_URL = PROPERTY_PREFIX + ".url"; public static final String KEY_SEARCH_BASE_DN = PROPERTY_PREFIX + ".baseDn"; public static final String KEY_SEARCH_PATTERN = PROPERTY_PREFIX + ".searchPattern"; public static final String KEY_SEARCH_USER_DN = PROPERTY_PREFIX + ".searchUserDn"; public static final String KEY_SEARCH_USER_PASSWORD = PROPERTY_PREFIX + ".searchUserPassword"; private static final String DEFAULT_LDAP_URL = "ldaps://ldap.server:9876"; private static final String DEFAULT_SEARCH_BASE_DN = "ou=users,dc=ldap,dc=server"; private static final String DEFAULT_SEARCH_PATTERN = "(&(objectClass=uidObject)(uid=%s))"; private static final String DEFAULT_SEARCH_USER_DN = "cn=admin,dc=ldap,dc=sever"; private static final String DEFAULT_SEARCH_USER_PASSWORD = "password"; // ========================================================================= // </editor-fold> private String ldapUrl; private String searchBaseDn; private String searchPattern; private String searchUserDn; private char[] searchUserPassword; public LdapService() { __reset(); } private void __reset() { ldapUrl = null; searchBaseDn = null; searchPattern = null; searchUserDn = null; searchUserPassword = null; } @Override protected void onStart() { super.onStart(); synchronized(this) { __reset(); AbstractApplication app = getApplication(); ldapUrl = app.getProperty(KEY_LDAP_URL, DEFAULT_LDAP_URL); searchBaseDn = app.getProperty(KEY_SEARCH_BASE_DN, DEFAULT_SEARCH_BASE_DN); searchPattern = app.getProperty(KEY_SEARCH_PATTERN, DEFAULT_SEARCH_PATTERN); searchUserDn = app.getProperty(KEY_SEARCH_USER_DN, DEFAULT_SEARCH_USER_DN); 
searchUserPassword = app.getProperty(KEY_SEARCH_USER_PASSWORD, DEFAULT_SEARCH_USER_PASSWORD).toCharArray(); } } public String getLdapUrl() { synchronized(this) { return ldapUrl; } } public String getSearchBaseDn() { synchronized (this) { return searchBaseDn; } } public String getSearchPattern() { synchronized(this) { return searchPattern; } } public String getSearchUserDn() { synchronized(this) { return searchUserDn; } } protected char[] getSearchUserPassword() { synchronized(this) { return searchUserPassword; } } private DirContext __getContext(String url, String userDn, char[] password) throws LdapException { Hashtable<String, Object> properties = new Hashtable<>(); properties.put(Context.PROVIDER_URL, url); properties.put(Context.SECURITY_PRINCIPAL, userDn); properties.put(Context.SECURITY_CREDENTIALS, password); properties.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); properties.put(Context.URL_PKG_PREFIXES, "com.sun.jndi.url"); properties.put(Context.REFERRAL, "ignore"); properties.put(Context.SECURITY_AUTHENTICATION, "simple"); try { return new InitialDirContext(properties); } catch (AuthenticationException ex) { throw new LdapException(LdapExceptionType.INVALID_CREDENTIALS, String.format("Invalid credentials for %s", userDn), ex); } catch (AuthenticationNotSupportedException ex) { throw new LdapException(LdapExceptionType.AUTHENTICATION_NOT_SUPPORTED, "Authentication not supported", ex); } catch (CommunicationException ex) { throw new LdapException(LdapExceptionType.COMMUNICATION_FAILURE, "Communication failure", ex); } catch (NamingException ex) { throw new LdapException(LdapExceptionType.NAMING_ERROR, ex); } } private SearchResult __searchUser(DirContext ctx, String searchBase, String searchPattern, String userId) throws LdapException { try { SearchControls constraints = new SearchControls(); constraints.setSearchScope(SearchControls.SUBTREE_SCOPE); NamingEnumeration<SearchResult> results = ctx.search( searchBase, 
String.format(searchPattern, userId), constraints ); if (results.hasMoreElements()) { SearchResult sr = (SearchResult) results.next(); return sr; } else { return null; } } catch (NamingException ex) { throw new LdapException(LdapExceptionType.NAMING_ERROR, ex); } } private LdapUser __getUser(String userId, char[] password) throws LdapException, NamingException { DirContext ctx; SearchResult searchResult; String userDn = null; ctx = __getContext(getLdapUrl(), getSearchUserDn(), getSearchUserPassword()); searchResult = __searchUser(ctx, getSearchBaseDn(), getSearchPattern(), userId); boolean found; if (searchResult != null) { userDn = searchResult.getNameInNamespace(); found = true; } else { found = false; } ctx.close(); ctx = null; if (found) { // Once a user is found, try to authenticate it try { ctx = __getContext(getLdapUrl(), userDn, password); return new LdapUser(userDn, ctx.getAttributes(userDn)); } catch (LdapException ex) { if (ex.getExceptionType() == LdapExceptionType.INVALID_CREDENTIALS) return null; throw ex; } finally { if (ctx != null) ctx.close(); } } else { return null; } } public LdapUser getUser(String userId, char[] password) throws LdapException { synchronized(this) { if (!isRunning()) throw new IllegalStateException("Service is not running"); try { return __getUser(userId, password); } catch (NamingException ex) { throw new LdapException(LdapExceptionType.NAMING_ERROR, ex); } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; import java.util.Collections; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; 
public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuilder> { @Override protected FiltersAggregationBuilder createTestAggregatorBuilder() { int size = randomIntBetween(1, 20); FiltersAggregationBuilder factory; if (randomBoolean()) { KeyedFilter[] filters = new KeyedFilter[size]; int i = 0; for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), size)) { filters[i++] = new KeyedFilter(key, QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } factory = new FiltersAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters); } else { QueryBuilder[] filters = new QueryBuilder[size]; for (int i = 0; i < size; i++) { filters[i] = QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20)); } factory = new FiltersAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters); } if (randomBoolean()) { factory.otherBucket(randomBoolean()); } if (randomBoolean()) { factory.otherBucketKey(randomAlphaOfLengthBetween(1, 20)); } return factory; } /** * Test that when passing in keyed filters as list or array, the list stored internally is sorted by key * Also check the list passed in is not modified by this but rather copied */ public void testFiltersSortedByKey() { KeyedFilter[] original = new KeyedFilter[]{new KeyedFilter("bbb", new MatchNoneQueryBuilder()), new KeyedFilter("aaa", new MatchNoneQueryBuilder())}; FiltersAggregationBuilder builder; builder = new FiltersAggregationBuilder("my-agg", original); assertEquals("aaa", builder.filters().get(0).key()); assertEquals("bbb", builder.filters().get(1).key()); // original should be unchanged assertEquals("bbb", original[0].key()); assertEquals("aaa", original[1].key()); } public void testOtherBucket() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters") 
.startObject().startObject("term").field("field", "foo").endObject().endObject() .endArray(); builder.endObject(); try (XContentParser parser = createParser(shuffleXContent(builder))) { parser.nextToken(); FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); // The other bucket is disabled by default assertFalse(filters.otherBucket()); builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters") .startObject().startObject("term").field("field", "foo").endObject().endObject() .endArray(); builder.field("other_bucket_key", "some_key"); builder.endObject(); } try (XContentParser parser = createParser(shuffleXContent(builder))) { parser.nextToken(); FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); // but setting a key enables it automatically assertTrue(filters.otherBucket()); builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters") .startObject().startObject("term").field("field", "foo").endObject().endObject() .endArray(); builder.field("other_bucket", false); builder.field("other_bucket_key", "some_key"); builder.endObject(); } try (XContentParser parser = createParser(shuffleXContent(builder))) { parser.nextToken(); FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); // unless the other bucket is explicitly disabled assertFalse(filters.otherBucket()); } } public void testRewrite() throws IOException { // test non-keyed filter that doesn't rewrite AggregationBuilder original = new FiltersAggregationBuilder("my-agg", new MatchAllQueryBuilder()); original.setMetadata(Collections.singletonMap(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20))); AggregationBuilder rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertSame(original, rewritten); // test 
non-keyed filter that does rewrite original = new FiltersAggregationBuilder("my-agg", new BoolQueryBuilder()); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertNotSame(original, rewritten); assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class)); assertEquals("my-agg", ((FiltersAggregationBuilder) rewritten).getName()); assertEquals(1, ((FiltersAggregationBuilder) rewritten).filters().size()); assertEquals("0", ((FiltersAggregationBuilder) rewritten).filters().get(0).key()); assertThat(((FiltersAggregationBuilder) rewritten).filters().get(0).filter(), instanceOf(MatchAllQueryBuilder.class)); assertFalse(((FiltersAggregationBuilder) rewritten).isKeyed()); // test keyed filter that doesn't rewrite original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new MatchAllQueryBuilder())); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertSame(original, rewritten); // test non-keyed filter that does rewrite original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new BoolQueryBuilder())); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertNotSame(original, rewritten); assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class)); assertEquals("my-agg", ((FiltersAggregationBuilder) rewritten).getName()); assertEquals(1, ((FiltersAggregationBuilder) rewritten).filters().size()); assertEquals("my-filter", ((FiltersAggregationBuilder) rewritten).filters().get(0).key()); assertThat(((FiltersAggregationBuilder) rewritten).filters().get(0).filter(), instanceOf(MatchAllQueryBuilder.class)); assertTrue(((FiltersAggregationBuilder) rewritten).isKeyed()); // test sub-agg filter that does rewrite original = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.BOOLEAN) .subAggregation( new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new 
BoolQueryBuilder())) ); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertNotSame(original, rewritten); assertNotEquals(original, rewritten); assertThat(rewritten, instanceOf(TermsAggregationBuilder.class)); assertThat(rewritten.getSubAggregations().size(), equalTo(1)); AggregationBuilder subAgg = rewritten.getSubAggregations().iterator().next(); assertThat(subAgg, instanceOf(FiltersAggregationBuilder.class)); assertNotSame(original.getSubAggregations().iterator().next(), subAgg); assertEquals("my-agg", subAgg.getName()); assertSame(rewritten, rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L))); } public void testRewritePreservesOtherBucket() throws IOException { FiltersAggregationBuilder originalFilters = new FiltersAggregationBuilder("my-agg", new BoolQueryBuilder()); originalFilters.otherBucket(randomBoolean()); originalFilters.otherBucketKey(randomAlphaOfLength(10)); AggregationBuilder rewritten = originalFilters.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class)); FiltersAggregationBuilder rewrittenFilters = (FiltersAggregationBuilder) rewritten; assertEquals(originalFilters.otherBucket(), rewrittenFilters.otherBucket()); assertEquals(originalFilters.otherBucketKey(), rewrittenFilters.otherBucketKey()); } public void testEmptyFilters() throws IOException { { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters").endArray(); // unkeyed array builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); parser.nextToken(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FiltersAggregationBuilder.parse("agg_name", parser)); assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); } { XContentBuilder builder = 
XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startObject("filters").endObject(); // keyed object builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); parser.nextToken(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FiltersAggregationBuilder.parse("agg_name", parser)); assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.store.pcap;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.pcap.decoder.Packet;
import org.apache.drill.exec.store.pcap.decoder.PacketDecoder;
import org.apache.drill.exec.store.pcap.dto.ColumnDto;
import org.apache.drill.exec.store.pcap.schema.PcapTypes;
import org.apache.drill.exec.store.pcap.schema.Schema;
import org.apache.drill.exec.vector.NullableBigIntVector;
import org.apache.drill.exec.vector.NullableIntVector;
import org.apache.drill.exec.vector.NullableTimeStampVector;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.UInt1Vector;
import org.apache.drill.exec.vector.ValueVector;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Record reader that decodes packets from a pcap capture file and exposes
 * them as Drill value vectors (one packet per {@link #next()} call).
 */
public class PcapRecordReader extends AbstractRecordReader {

  private OutputMutator output;

  private final PacketDecoder decoder;
  private ImmutableList<ProjectedColumnInfo> projectedCols;

  // Read-ahead buffer over the raw pcap stream; refilled when fewer than
  // ~9000 bytes (one max-size packet) remain unconsumed.
  private byte[] buffer = new byte[100000];
  private int offset = 0;
  private InputStream in;
  private int validBytes;

  /** Maps pcap column types to the Drill minor types used to materialize them. */
  private static final Map<PcapTypes, MinorType> TYPES;

  /** Pairs a materialized value vector with the pcap column it is fed from. */
  private static class ProjectedColumnInfo {
    ValueVector vv;
    ColumnDto pcapColumn;
  }

  static {
    TYPES = ImmutableMap.<PcapTypes, TypeProtos.MinorType>builder()
        .put(PcapTypes.STRING, MinorType.VARCHAR)
        .put(PcapTypes.BYTES, MinorType.VARBINARY)
        .put(PcapTypes.INTEGER, MinorType.INT)
        .put(PcapTypes.LONG, MinorType.BIGINT)
        .put(PcapTypes.TIMESTAMP, MinorType.TIMESTAMP)
        .build();
  }

  /**
   * Opens the pcap file and primes the read-ahead buffer.
   *
   * @param inputPath        path of the pcap file to read
   * @param projectedColumns columns requested by the query
   * @throws RuntimeException if the file cannot be opened or read
   */
  public PcapRecordReader(final String inputPath, final List<SchemaPath> projectedColumns) {
    try {
      this.in = new FileInputStream(inputPath);
      this.decoder = getPacketDecoder();
      validBytes = in.read(buffer);
    } catch (IOException e) {
      // Preserve the cause so the real I/O failure is not lost.
      throw new RuntimeException("File " + inputPath + " not Found", e);
    }
    setColumns(projectedColumns);
  }

  @Override
  public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
    this.output = output;
  }

  @Override
  public int next() {
    projectedCols = getProjectedColsIfItNull();
    try {
      return parsePcapFilesAndPutItToTable();
    } catch (IOException io) {
      throw new RuntimeException("Trouble with reading packets in file!", io);
    }
  }

  @Override
  public void close() throws Exception {
    // Release the underlying file handle (previously leaked).
    if (in != null) {
      in.close();
      in = null;
    }
  }

  private PacketDecoder getPacketDecoder() {
    try {
      return new PacketDecoder(in);
    } catch (IOException io) {
      throw new RuntimeException("File Not Found or some I/O issue", io);
    }
  }

  /** Lazily builds the projected-column list on first use. */
  private ImmutableList<ProjectedColumnInfo> getProjectedColsIfItNull() {
    return projectedCols != null ? projectedCols : initCols(new Schema());
  }

  /** Materializes one value vector per schema column. */
  private ImmutableList<ProjectedColumnInfo> initCols(final Schema schema) {
    ImmutableList.Builder<ProjectedColumnInfo> pciBuilder = ImmutableList.builder();
    ColumnDto column;
    for (int i = 0; i < schema.getNumberOfColumns(); i++) {
      column = schema.getColumnByIndex(i);

      final String name = column.getColumnName();
      final PcapTypes type = column.getColumnType();
      TypeProtos.MinorType minorType = TYPES.get(type);

      ProjectedColumnInfo pci = getProjectedColumnInfo(column, name, minorType);
      pciBuilder.add(pci);
    }
    return pciBuilder.build();
  }

  private ProjectedColumnInfo getProjectedColumnInfo(final ColumnDto column,
                                                    final String name,
                                                    final MinorType minorType) {
    TypeProtos.MajorType majorType = getMajorType(minorType);

    MaterializedField field = MaterializedField.create(name, majorType);

    ValueVector vector = getValueVector(minorType, majorType, field);

    return getProjectedColumnInfo(column, vector);
  }

  private ProjectedColumnInfo getProjectedColumnInfo(final ColumnDto column, final ValueVector vector) {
    ProjectedColumnInfo pci = new ProjectedColumnInfo();
    pci.vv = vector;
    pci.pcapColumn = column;
    return pci;
  }

  private MajorType getMajorType(final MinorType minorType) {
    return Types.optional(minorType);
  }

  private ValueVector getValueVector(final MinorType minorType,
                                     final MajorType majorType,
                                     final MaterializedField field) {
    try {
      final Class<? extends ValueVector> clazz = TypeHelper.getValueVectorClass(
          minorType, majorType.getMode());
      ValueVector vector = output.addField(field, clazz);
      vector.allocateNew();
      return vector;
    } catch (SchemaChangeException sce) {
      // Keep the historical exception type for callers, but preserve the cause.
      NullPointerException npe =
          new NullPointerException("The addition of this field is incompatible with this OutputMutator's capabilities");
      npe.initCause(sce);
      throw npe;
    }
  }

  /**
   * Decodes the next packet from the buffer, refilling from the stream when
   * fewer than 9000 bytes remain, and writes it into the projected vectors.
   *
   * @return 1 if a packet was emitted, 0 when the stream is exhausted
   */
  private int parsePcapFilesAndPutItToTable() throws IOException {
    Packet packet = new Packet();
    while (offset < validBytes) {

      if (validBytes - offset < 9000) {
        // Compact the unread tail to the front of the buffer, then refill.
        System.arraycopy(buffer, offset, buffer, 0, validBytes - offset);
        validBytes = validBytes - offset;
        offset = 0;

        int n = in.read(buffer, validBytes, buffer.length - validBytes);
        if (n > 0) {
          validBytes += n;
        }
      }

      offset = decoder.decodePacket(buffer, offset, packet);

      if (addDataToTable(packet, decoder.getNetwork())) {
        return 1;
      }
    }
    return 0;
  }

  /** Writes one decoded packet into every projected column's vector (row 0). */
  private boolean addDataToTable(final Packet packet, final int networkType) {
    for (ProjectedColumnInfo pci : projectedCols) {
      switch (pci.pcapColumn.getColumnName()) {
        case "type":
          setStringColumnValue(packet.getPacketType(), pci);
          break;
        case "timestamp":
          setTimestampColumnValue(packet.getTimestamp(), pci);
          break;
        case "network":
          setIntegerColumnValue(networkType, pci);
          break;
        case "src_mac_address":
          setStringColumnValue(packet.getEthernetSource(), pci);
          break;
        case "dst_mac_address":
          setStringColumnValue(packet.getEthernetDestination(), pci);
          break;
        case "dst_ip":
          if (packet.getDst_ip() != null) {
            setStringColumnValue(packet.getDst_ip().getHostAddress(), pci);
          } else {
            setStringColumnValue(null, pci);
          }
          break;
        case "src_ip":
          if (packet.getSrc_ip() != null) {
            setStringColumnValue(packet.getSrc_ip().getHostAddress(), pci);
          } else {
            setStringColumnValue(null, pci);
          }
          break;
        case "src_port":
          setIntegerColumnValue(packet.getSrc_port(), pci);
          break;
        case "dst_port":
          setIntegerColumnValue(packet.getDst_port(), pci);
          break;
        case "tcp_session":
          if (packet.isTcpPacket()) {
            setLongColumnValue(packet.getSessionHash(), pci);
          }
          break;
        case "tcp_sequence":
          if (packet.isTcpPacket()) {
            setIntegerColumnValue(packet.getSequenceNumber(), pci);
          }
          break;
        case "packet_length":
          setIntegerColumnValue(packet.getPacketLength(), pci);
          break;
        case "raw_data":
          if (packet.getData() != null) {
            setBinaryColumnValue(packet.getData(), pci);
          } else {
            setStringColumnValue(null, pci);
          }
          break;
        case "data":
          if (packet.getData() != null) {
            setStringColumnValue(parseBytesToASCII(packet.getData()), pci);
          } else {
            setStringColumnValue("[]", pci);
          }
          break;
      }
    }
    return true;
  }

  private void setLongColumnValue(long data, ProjectedColumnInfo pci) {
    ((NullableBigIntVector.Mutator) pci.vv.getMutator())
        .setSafe(0, data);
  }

  /** Renders payload bytes as text, replacing non-printable characters with '.'. */
  private String parseBytesToASCII(byte[] data) {
    // Use an explicit charset: new String(byte[]) depends on the platform
    // default and yields different output on different JVMs.
    return new String(data, UTF_8).trim()
        .replaceAll("\\P{Print}", ".");
  }

  private void setIntegerColumnValue(final int data, final ProjectedColumnInfo pci) {
    ((NullableIntVector.Mutator) pci.vv.getMutator())
        .setSafe(0, data);
  }

  private void setTimestampColumnValue(final long data, final ProjectedColumnInfo pci) {
    ((NullableTimeStampVector.Mutator) pci.vv.getMutator())
        .setSafe(0, data);
  }

  private void setBinaryColumnValue(byte[] data, final ProjectedColumnInfo pci) {
    UInt1Vector.Mutator mutator = ((UInt1Vector.Mutator) pci.vv.getMutator());
    for (int i = 0; i < data.length; i++) {
      mutator.setSafe(i, data[i]);
    }
  }

  private void setStringColumnValue(final String data, final ProjectedColumnInfo pci) {
    if (data == null) {
      ((NullableVarCharVector.Mutator) pci.vv.getMutator())
          .setNull(0);
    } else {
      ByteBuffer value = ByteBuffer.wrap(data.getBytes(UTF_8));
      ((NullableVarCharVector.Mutator) pci.vv.getMutator())
          .setSafe(0, value, 0, value.remaining());
    }
  }
}
package org.apache.helix.zookeeper.datamodel;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.helix.zookeeper.constant.ZkSystemPropertyKeys;
import org.apache.helix.zookeeper.datamodel.serializer.JacksonPayloadSerializer;
import org.apache.helix.zookeeper.datamodel.serializer.PayloadSerializer;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Generic Record Format to store data at a Node This can be used to store
 * simpleFields mapFields listFields
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonSerialize(include = Inclusion.NON_NULL)
public class ZNRecord {
  static Logger _logger = LoggerFactory.getLogger(ZNRecord.class);

  private final String id;

  @JsonIgnore(true)
  public static final String LIST_FIELD_BOUND = "listField.bound";

  /** A field name in ZNRecord's simple fields to enable compression in ZNRecord serializers. */
  @JsonIgnore
  public static final String ENABLE_COMPRESSION_BOOLEAN_FIELD = "enableCompression";

  /**
   * Default value for system property
   * {@link ZkSystemPropertyKeys#ZK_SERIALIZER_ZNRECORD_AUTO_COMPRESS_ENABLED}
   */
  @JsonIgnore
  public static final String ZK_SERIALIZER_ZNRECORD_AUTO_COMPRESS_DEFAULT = "true";

  @JsonIgnore(true)
  public static final int SIZE_LIMIT = 1000 * 1024; // leave a margin out of 1M

  // We don't want the _deltaList to be serialized and deserialized
  private List<ZNRecordDelta> _deltaList = new ArrayList<ZNRecordDelta>();

  private Map<String, String> simpleFields;
  private Map<String, Map<String, String>> mapFields;
  private Map<String, List<String>> listFields;
  private byte[] rawPayload;

  private PayloadSerializer _serializer;

  // the version field of zookeeper Stat
  private int _version;

  private long _creationTime;

  private long _modifiedTime;

  private long _ephemeralOwner;

  /**
   * Initialize with an identifier
   * @param id
   */
  @JsonCreator
  public ZNRecord(@JsonProperty("id") String id) {
    this.id = id;
    simpleFields = new TreeMap<>();
    mapFields = new TreeMap<>();
    listFields = new TreeMap<>();
    rawPayload = null;
    _serializer = new JacksonPayloadSerializer();
  }

  /**
   * Initialize with a pre-populated ZNRecord
   * @param record
   */
  public ZNRecord(ZNRecord record) {
    this(record, record.getId());
  }

  /**
   * Initialize with a pre-populated ZNRecord, overwriting the identifier
   * @param record
   * @param id
   */
  public ZNRecord(ZNRecord record, String id) {
    this(id);
    simpleFields.putAll(record.getSimpleFields());
    mapFields.putAll(record.getMapFields());
    listFields.putAll(record.getListFields());
    if (record.rawPayload != null) {
      rawPayload = new byte[record.rawPayload.length];
      System.arraycopy(record.rawPayload, 0, rawPayload, 0, record.rawPayload.length);
    } else {
      rawPayload = null;
    }
    _version = record.getVersion();
    _creationTime = record.getCreationTime();
    _modifiedTime = record.getModifiedTime();
    _ephemeralOwner = record.getEphemeralOwner();
  }

  /**
   * Set a custom {@link PayloadSerializer} to allow including arbitrary data
   * @param serializer
   */
  @JsonIgnore(true)
  public void setPayloadSerializer(PayloadSerializer serializer) {
    _serializer = serializer;
  }

  /**
   * Set the list of updates to this ZNRecord
   * @param deltaList
   */
  @JsonIgnore(true)
  public void setDeltaList(List<ZNRecordDelta> deltaList) {
    _deltaList = deltaList;
  }

  /**
   * Get the list of updates to this ZNRecord
   * @return list of {@link ZNRecordDelta}
   */
  @JsonIgnore(true)
  public List<ZNRecordDelta> getDeltaList() {
    return _deltaList;
  }

  /**
   * Get all plain key, value fields
   * @return Map of simple fields
   */
  @JsonProperty
  public Map<String, String> getSimpleFields() {
    return simpleFields;
  }

  /**
   * Set all plain key, value fields
   * @param simpleFields
   */
  @JsonProperty
  public void setSimpleFields(Map<String, String> simpleFields) {
    this.simpleFields = simpleFields;
  }

  /**
   * Get all fields whose values are key, value properties
   * @return all map fields
   */
  @JsonProperty
  public Map<String, Map<String, String>> getMapFields() {
    return mapFields;
  }

  /**
   * Set all fields whose values are key, value properties
   * @param mapFields
   */
  @JsonProperty
  public void setMapFields(Map<String, Map<String, String>> mapFields) {
    this.mapFields = mapFields;
  }

  /**
   * Get all fields whose values are a list of values
   * @return all list fields
   */
  @JsonProperty
  public Map<String, List<String>> getListFields() {
    return listFields;
  }

  /**
   * Set all fields whose values are a list of values
   * @param listFields
   */
  @JsonProperty
  public void setListFields(Map<String, List<String>> listFields) {
    this.listFields = listFields;
  }

  /**
   * Set a simple key, value field
   * @param k
   * @param v
   */
  @JsonProperty
  public void setSimpleField(String k, String v) {
    simpleFields.put(k, v);
  }

  /**
   * Set a value with the input key if the key is absent.
   * @param k
   * @param v
   */
  @JsonProperty
  public void setSimpleFieldIfAbsent(String k, String v) {
    simpleFields.putIfAbsent(k, v);
  }

  @JsonProperty
  public String getId() {
    return id;
  }

  /**
   * Set arbitrary data serialized as a byte array payload. Consider using
   * {@link #setPayload(Object)} instead
   * @param payload
   */
  @JsonProperty
  public void setRawPayload(byte[] payload) {
    rawPayload = payload;
  }

  /**
   * Get arbitrary data serialized as a byte array payload. Consider using
   * {@link #getPayload(Class)} instead
   * @return
   */
  @JsonProperty
  public byte[] getRawPayload() {
    return rawPayload;
  }

  /**
   * Set a typed payload that will be serialized and persisted.
   * @param payload
   */
  @JsonIgnore(true)
  public <T> void setPayload(T payload) {
    if (_serializer != null && payload != null) {
      rawPayload = _serializer.serialize(payload);
    } else {
      rawPayload = null;
    }
  }

  /**
   * Get a typed deserialized payload
   * @param clazz
   * @return
   */
  @JsonIgnore(true)
  public <T> T getPayload(Class<T> clazz) {
    if (_serializer != null && rawPayload != null) {
      return _serializer.deserialize(clazz, rawPayload);
    } else {
      return null;
    }
  }

  /**
   * Set a single String --> Map field
   * @param k
   * @param v
   */
  public void setMapField(String k, Map<String, String> v) {
    mapFields.put(k, v);
  }

  /**
   * Set a single String --> List field
   * @param k
   * @param v
   */
  public void setListField(String k, List<String> v) {
    listFields.put(k, v);
  }

  /**
   * Get a single String field
   * @param k
   * @return String field
   */
  public String getSimpleField(String k) {
    return simpleFields.get(k);
  }

  /**
   * Get a single Map field
   * @param k
   * @return String --> String map
   */
  public Map<String, String> getMapField(String k) {
    return mapFields.get(k);
  }

  /**
   * Get a single List field
   * @param k
   * @return String list
   */
  public List<String> getListField(String k) {
    return listFields.get(k);
  }

  /**
   * Set a single simple int field
   * @param k
   * @param v
   */
  public void setIntField(String k, int v) {
    setSimpleField(k, Integer.toString(v));
  }

  /**
   * Set a single simple int field with the input key if the key is absent.
   * @param k
   * @param v
   */
  public void setIntFieldIfAbsent(String k, int v) {
    setSimpleFieldIfAbsent(k, Integer.toString(v));
  }

  /**
   * Get a single int field
   * @param k
   * @param defaultValue
   * @return the int value, or defaultValue if not present
   */
  public int getIntField(String k, int defaultValue) {
    int v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      try {
        v = Integer.parseInt(valueStr);
      } catch (NumberFormatException e) {
        _logger.warn("", e);
      }
    }
    return v;
  }

  /**
   * Set a single simple long field
   * @param k
   * @param v
   */
  public void setLongField(String k, long v) {
    setSimpleField(k, Long.toString(v));
  }

  /**
   * Get a single long field
   * @param k
   * @param defaultValue
   * @return the long value, or defaultValue if not present
   */
  public long getLongField(String k, long defaultValue) {
    long v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      try {
        v = Long.parseLong(valueStr);
      } catch (NumberFormatException e) {
        _logger.warn("", e);
      }
    }
    return v;
  }

  /**
   * Set a single simple double field
   * @param k
   * @param v
   */
  public void setDoubleField(String k, double v) {
    setSimpleField(k, Double.toString(v));
  }

  /**
   * Get a single double field
   * @param k
   * @param defaultValue
   * @return the double value, or defaultValue if not present
   */
  public double getDoubleField(String k, double defaultValue) {
    double v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      try {
        v = Double.parseDouble(valueStr);
      } catch (NumberFormatException e) {
        _logger.warn("", e);
      }
    }
    return v;
  }

  /**
   * Set a single simple boolean field
   * @param k
   * @param v
   */
  public void setBooleanField(String k, boolean v) {
    setSimpleField(k, Boolean.toString(v));
  }

  /**
   * Set a single simple boolean field with the input key if the key is absent.
   *
   * @param k
   * @param v
   */
  @JsonProperty
  public void setBooleanFieldIfAbsent(String k, boolean v) {
    setSimpleFieldIfAbsent(k, Boolean.toString(v));
  }

  /**
   * Get a single boolean field
   * @param k
   * @param defaultValue
   * @return the boolean field, or defaultValue if not present
   */
  public boolean getBooleanField(String k, boolean defaultValue) {
    boolean v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      // Boolean.parseBoolean() doesn't throw an exception if the string isn't a valid boolean.
      // Thus, a direct comparison is necessary to make sure the value is actually "true" or
      // "false"
      if (valueStr.equalsIgnoreCase(Boolean.TRUE.toString())) {
        v = true;
      } else if (valueStr.equalsIgnoreCase(Boolean.FALSE.toString())) {
        v = false;
      }
    }
    return v;
  }

  /**
   * Set a single simple Enum field
   * @param k
   * @param v
   */
  public <T extends Enum<T>> void setEnumField(String k, T v) {
    setSimpleField(k, v.toString());
  }

  /**
   * Get a single Enum field
   * @param k
   * @param enumType
   * @param defaultValue
   * @return the Enum field of enumType, or defaultValue if not present
   */
  public <T extends Enum<T>> T getEnumField(String k, Class<T> enumType, T defaultValue) {
    T v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      try {
        v = Enum.valueOf(enumType, valueStr);
      } catch (NullPointerException | IllegalArgumentException e) {
        _logger.warn("", e);
      }
    }
    return v;
  }

  /**
   * Get a single String field
   * @param k
   * @param defaultValue
   * @return the String value, or defaultValue if not present
   */
  public String getStringField(String k, String defaultValue) {
    String v = defaultValue;
    String valueStr = getSimpleField(k);
    if (valueStr != null) {
      v = valueStr;
    }
    return v;
  }

  @Override
  public String toString() {
    // StringBuilder: no synchronization needed for a local accumulator.
    StringBuilder sb = new StringBuilder();
    sb.append(id + ", ");
    if (simpleFields != null) {
      sb.append(simpleFields);
    }
    if (mapFields != null) {
      sb.append(mapFields);
    }
    if (listFields != null) {
      sb.append(listFields);
    }
    return sb.toString();
  }

  /**
   * merge functionality is used to merge multiple znrecord into a single one.
   * This will make use of the id of each ZNRecord and append it to every key
   * thus making key unique. This is needed to optimize on the watches.
   * @param record
   */
  public void merge(ZNRecord record) {
    if (record == null) {
      return;
    }

    if (record.getDeltaList().size() > 0) {
      _logger.info("Merging with delta list, recordId = " + id + " other:" + record.getId());
      merge(record.getDeltaList());
      return;
    }
    simpleFields.putAll(record.simpleFields);
    for (String key : record.mapFields.keySet()) {
      Map<String, String> map = mapFields.get(key);
      if (map != null) {
        map.putAll(record.mapFields.get(key));
      } else {
        mapFields.put(key, record.mapFields.get(key));
      }
    }
    for (String key : record.listFields.keySet()) {
      // Default merge logic could introduce duplicated values. For example, old Record has list field
      // with value [1, 2, 3]. New Record is exactly same as previous one. Merged result will be
      // [1, 2, 3, 1, 2, 3].
      List<String> list = listFields.get(key);
      if (list != null) {
        list.addAll(record.listFields.get(key));
      } else {
        listFields.put(key, record.listFields.get(key));
      }
    }
  }

  /**
   * Replace functionality is used to update this ZNRecord with the given ZNRecord. The value of a
   * field in this record will be replaced with the value of the same field in given record if it
   * presents. If there is new field in given ZNRecord but not in this record, add that field into
   * this record.
   * The list fields and map fields will be replaced as a single entry.
   *
   * @param record
   */
  public void update(ZNRecord record) {
    if (record != null) {
      simpleFields.putAll(record.simpleFields);
      listFields.putAll(record.listFields);
      mapFields.putAll(record.mapFields);
    }
  }

  /**
   * Merge in a {@link ZNRecordDelta} corresponding to its merge policy
   * @param delta
   */
  void merge(ZNRecordDelta delta) {
    if (delta.getMergeOperation() == ZNRecordDelta.MergeOperation.ADD) {
      merge(delta.getRecord());
    } else if (delta.getMergeOperation() == ZNRecordDelta.MergeOperation.SUBTRACT) {
      subtract(delta.getRecord());
    } else if (delta.getMergeOperation() == ZNRecordDelta.MergeOperation.UPDATE) {
      update(delta.getRecord());
    }
  }

  /**
   * Batch merge of {@link ZNRecordDelta}
   * @see #merge(ZNRecordDelta)
   * @param deltaList
   */
  void merge(List<ZNRecordDelta> deltaList) {
    for (ZNRecordDelta delta : deltaList) {
      merge(delta);
    }
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof ZNRecord)) {
      return false;
    }
    ZNRecord that = (ZNRecord) obj;
    if (this.getSimpleFields().size() != that.getSimpleFields().size()) {
      return false;
    }
    if (this.getMapFields().size() != that.getMapFields().size()) {
      return false;
    }
    if (this.getListFields().size() != that.getListFields().size()) {
      return false;
    }
    if (!this.getSimpleFields().equals(that.getSimpleFields())) {
      return false;
    }
    if (!this.getMapFields().equals(that.getMapFields())) {
      return false;
    }
    if (!this.getListFields().equals(that.getListFields())) {
      return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    // Required because equals() is overridden: records that compare equal must
    // share a hash code for hash-based collections. Must stay consistent with
    // equals(), which compares only the three field maps (not id/payload).
    int result = simpleFields.hashCode();
    result = 31 * result + mapFields.hashCode();
    result = 31 * result + listFields.hashCode();
    return result;
  }

  /**
   * Subtract value from this ZNRecord
   * Note: does not support subtract in each list in list fields or map in
   * mapFields
   * @param value
   */
  public void subtract(ZNRecord value) {
    for (String key : value.getSimpleFields().keySet()) {
      simpleFields.remove(key);
    }

    for (String key : value.getListFields().keySet()) {
      listFields.remove(key);
    }

    for (String key : value.getMapFields().keySet()) {
      Map<String, String> map = value.getMapField(key);
      if (map == null) {
        mapFields.remove(key);
      } else {
        Map<String, String> nestedMap = mapFields.get(key);
        if (nestedMap != null) {
          for (String mapKey : map.keySet()) {
            nestedMap.remove(mapKey);
          }
          if (nestedMap.size() == 0) {
            mapFields.remove(key);
          }
        }
      }
    }
  }

  /**
   * Get the version of this record
   * @return version number
   */
  @JsonIgnore(true)
  public int getVersion() {
    return _version;
  }

  /**
   * Set the version of this record
   * @param version
   */
  @JsonIgnore(true)
  public void setVersion(int version) {
    _version = version;
  }

  /**
   * Get the time that this record was created
   * @return UNIX timestamp
   */
  @JsonIgnore(true)
  public long getCreationTime() {
    return _creationTime;
  }

  /**
   * Set the time that this record was created
   * @param creationTime
   */
  @JsonIgnore(true)
  public void setCreationTime(long creationTime) {
    _creationTime = creationTime;
  }

  /**
   * Get the time that this record was last modified
   * @return UNIX timestamp
   */
  @JsonIgnore(true)
  public long getModifiedTime() {
    return _modifiedTime;
  }

  /**
   * Set the time that this record was last modified
   * @param modifiedTime
   */
  @JsonIgnore(true)
  public void setModifiedTime(long modifiedTime) {
    _modifiedTime = modifiedTime;
  }

  /**
   * Get the session Id of ephemeral node owner
   */
  @JsonIgnore(true)
  public long getEphemeralOwner() {
    return _ephemeralOwner;
  }

  /**
   * Set the session Id of ephemeral node owner
   * @param ephemeralOwner
   */
  @JsonIgnore(true)
  public void setEphemeralOwner(long ephemeralOwner) {
    _ephemeralOwner = ephemeralOwner;
  }
}
/*
 * Copyright (c) 2014, Harald Kuhr
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 *
 * * Neither the name of the copyright holder nor the names of its
 *   contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.twelvemonkeys.imageio.plugins.pcx;

import com.twelvemonkeys.imageio.AbstractMetadata;

import javax.imageio.metadata.IIOMetadataNode;
import java.awt.image.IndexColorModel;

/**
 * Exposes a decoded PCX header (and optional VGA palette) through the
 * standard {@code javax_imageio_1.0} metadata format.
 */
final class PCXMetadata extends AbstractMetadata {
    private final PCXHeader header;
    private final IndexColorModel vgaPalette;

    PCXMetadata(final PCXHeader header, final IndexColorModel vgaPalette) {
        this.header = header;
        this.vgaPalette = vgaPalette;
    }

    @Override
    protected IIOMetadataNode getStandardChromaNode() {
        IIOMetadataNode chroma = new IIOMetadataNode("Chroma");

        IndexColorModel palette = null;
        boolean gray = false;

        IIOMetadataNode csType = new IIOMetadataNode("ColorSpaceType");
        switch (header.getBitsPerPixel()) {
            case 1:
            case 2:
            case 4:
                palette = header.getEGAPalette();
                csType.setAttribute("name", "RGB");
                break;
            case 8:
                // We may have IndexColorModel here for 1 channel images
                if (header.getChannels() == 1 && vgaPalette != null) {
                    palette = vgaPalette;
                    csType.setAttribute("name", "RGB");
                    break;
                }

                if (header.getChannels() == 1) {
                    csType.setAttribute("name", "GRAY");
                    gray = true;
                    break;
                }

                csType.setAttribute("name", "RGB");
                break;
            case 24:
                // Some sources says this is possible... Untested.
                csType.setAttribute("name", "RGB");
                break;
            default:
                csType.setAttribute("name", "Unknown");
        }

        chroma.appendChild(csType);

        // NOTE: Channels in chroma node reflects channels in color model, not data! (see data node)
        IIOMetadataNode numChannels = new IIOMetadataNode("NumChannels");
        numChannels.setAttribute("value", gray ? "1" : "3");
        chroma.appendChild(numChannels);

        IIOMetadataNode blackIsZero = new IIOMetadataNode("BlackIsZero");
        blackIsZero.setAttribute("value", "TRUE");
        chroma.appendChild(blackIsZero);

        if (palette != null) {
            IIOMetadataNode paletteNode = new IIOMetadataNode("Palette");
            chroma.appendChild(paletteNode);

            for (int i = 0; i < palette.getMapSize(); i++) {
                IIOMetadataNode paletteEntry = new IIOMetadataNode("PaletteEntry");
                paletteEntry.setAttribute("index", Integer.toString(i));

                paletteEntry.setAttribute("red", Integer.toString(palette.getRed(i)));
                paletteEntry.setAttribute("green", Integer.toString(palette.getGreen(i)));
                paletteEntry.setAttribute("blue", Integer.toString(palette.getBlue(i)));

                paletteNode.appendChild(paletteEntry);
            }
        }

        return chroma;
    }

    // Only emitted when the header declares a compression scheme (RLE);
    // returns null for uncompressed images.
    @Override
    protected IIOMetadataNode getStandardCompressionNode() {
        if (header.getCompression() != PCX.COMPRESSION_NONE) {
            IIOMetadataNode node = new IIOMetadataNode("Compression");

            IIOMetadataNode compressionTypeName = new IIOMetadataNode("CompressionTypeName");
            // Fixed typo: was "Uknown"
            compressionTypeName.setAttribute("value", header.getCompression() == PCX.COMPRESSION_RLE ? "RLE" : "Unknown");
            node.appendChild(compressionTypeName);

            IIOMetadataNode lossless = new IIOMetadataNode("Lossless");
            lossless.setAttribute("value", "TRUE");
            node.appendChild(lossless);

            return node;
        }

        return null;
    }

    @Override
    protected IIOMetadataNode getStandardDataNode() {
        IIOMetadataNode node = new IIOMetadataNode("Data");

        // Planar configuration only makes sense for multi-channel images
        if (header.getChannels() > 1) {
            IIOMetadataNode planarConfiguration = new IIOMetadataNode("PlanarConfiguration");
            planarConfiguration.setAttribute("value", "LineInterleaved");
            node.appendChild(planarConfiguration);
        }

        IIOMetadataNode sampleFormat = new IIOMetadataNode("SampleFormat");
        switch (header.getBitsPerPixel()) {
            case 1:
            case 2:
            case 4:
                sampleFormat.setAttribute("value", "Index");
                break;
            case 8:
                if (header.getChannels() == 1 && vgaPalette != null) {
                    sampleFormat.setAttribute("value", "Index");
                    break;
                }
                // Else fall through for GRAY
            default:
                sampleFormat.setAttribute("value", "UnsignedIntegral");
                break;
        }
        node.appendChild(sampleFormat);

        IIOMetadataNode bitsPerSample = new IIOMetadataNode("BitsPerSample");
        bitsPerSample.setAttribute("value", createListValue(header.getChannels(), Integer.toString(header.getBitsPerPixel())));
        node.appendChild(bitsPerSample);

        IIOMetadataNode significantBitsPerSample = new IIOMetadataNode("SignificantBitsPerSample");
        significantBitsPerSample.setAttribute("value", createListValue(header.getChannels(), Integer.toString(header.getBitsPerPixel())));
        node.appendChild(significantBitsPerSample);

        IIOMetadataNode sampleMSB = new IIOMetadataNode("SampleMSB");
        sampleMSB.setAttribute("value", createListValue(header.getChannels(), "0"));
        // Bug fix: the SampleMSB node was created but never attached to the Data node.
        node.appendChild(sampleMSB);

        return node;
    }

    /** Builds a space-separated list of {@code itemCount} entries, cycling over {@code values}. */
    private String createListValue(final int itemCount, final String... values) {
        StringBuilder buffer = new StringBuilder();

        for (int i = 0; i < itemCount; i++) {
            if (buffer.length() > 0) {
                buffer.append(' ');
            }

            buffer.append(values[i % values.length]);
        }

        return buffer.toString();
    }

    @Override
    protected IIOMetadataNode getStandardDimensionNode() {
        IIOMetadataNode dimension = new IIOMetadataNode("Dimension");

        IIOMetadataNode imageOrientation = new IIOMetadataNode("ImageOrientation");
        imageOrientation.setAttribute("value", "Normal");
        dimension.appendChild(imageOrientation);

        return dimension;
    }

    @Override
    protected IIOMetadataNode getStandardDocumentNode() {
        IIOMetadataNode dimension = new IIOMetadataNode("Document");

        IIOMetadataNode imageOrientation = new IIOMetadataNode("FormatVersion");
        imageOrientation.setAttribute("value", String.valueOf(header.getVersion()));
        dimension.appendChild(imageOrientation);

        return dimension;
    }

    // No text node

    // No tiling

    @Override
    protected IIOMetadataNode getStandardTransparencyNode() {
        // NOTE: There doesn't seem to be any god way to determine transparency, other than by convention
        // 1 channel: Gray, 2 channel: Gray + Alpha, 3 channel: RGB, 4 channel: RGBA (hopefully never CMYK...)
        IIOMetadataNode transparency = new IIOMetadataNode("Transparency");

        IIOMetadataNode alpha = new IIOMetadataNode("Alpha");
        alpha.setAttribute("value", header.getChannels() == 1 || header.getChannels() == 3 ? "none" : "nonpremultiplied");
        transparency.appendChild(alpha);

        return transparency;
    }
}
/******************************************************************************* * Caleydo - Visualization for Molecular Biology - http://caleydo.org * Copyright (c) The Caleydo Team. All rights reserved. * Licensed under the new BSD license, available at http://caleydo.org/license ******************************************************************************/ package org.caleydo.util.r.filter; import org.caleydo.core.data.collection.Histogram; import org.caleydo.core.data.datadomain.ATableBasedDataDomain; import org.caleydo.core.data.filter.Filter; import org.caleydo.core.data.filter.MetaFilter; import org.caleydo.core.data.filter.event.RemoveFilterEvent; import org.caleydo.core.data.filter.representation.AFilterRepresentation; import org.caleydo.core.data.perspective.table.TablePerspective; import org.caleydo.core.data.virtualarray.VirtualArray; import org.caleydo.core.data.virtualarray.delta.VADeltaItem; import org.caleydo.core.data.virtualarray.delta.VirtualArrayDelta; import org.caleydo.core.event.EventPublisher; import org.caleydo.core.manager.GeneralManager; import org.caleydo.core.view.ViewManager; import org.caleydo.view.histogram.GLHistogram; import org.caleydo.view.histogram.RcpGLHistogramView; import org.eclipse.swt.SWT; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.MouseAdapter; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Slider; import org.eclipse.swt.widgets.Text; public class FilterRepresentationPValue extends AFilterRepresentation { private final 
static String TITLE = "Variance Filter"; private ATableBasedDataDomain dataDomain; private TablePerspective tablePerspective1; private Histogram histogram; private float pValue = -1; private float pValueMax = -1; @Override public synchronized boolean create() { if (!super.create()) return false; Display.getDefault().asyncExec(new Runnable() { @Override public void run() { ((Shell) parentComposite).setText(TITLE); GridData gridData = new GridData(); gridData.grabExcessHorizontalSpace = true; gridData.horizontalAlignment = GridData.FILL; Composite infoComposite = new Composite(parentComposite, SWT.NULL); infoComposite.setLayoutData(gridData); infoComposite.setLayout(new GridLayout(4, false)); Label pValueLabel = new Label(infoComposite, SWT.NONE); pValueLabel.setText("p-Value:"); final Text pValueInputField = new Text(infoComposite, SWT.SINGLE); final Slider pValueSlider = new Slider(infoComposite, SWT.HORIZONTAL); if (pValue == -1) { pValueMax = 0; // FIXME was histogram.getMax() pValue = pValueMax; } gridData = new GridData(); gridData.grabExcessHorizontalSpace = true; gridData.horizontalAlignment = GridData.FILL; pValueSlider.setLayoutData(gridData); pValueSlider.setSelection((int) (pValue * 10000)); pValueInputField.setEditable(true); pValueInputField.setText(Float.toString(pValue)); pValueInputField.addKeyListener(new KeyAdapter() { @Override public void keyPressed(KeyEvent e) { String enteredValue = pValueInputField.getText(); pValue = new Float(enteredValue); pValueSlider.setSelection((int) (pValue * 10000)); isDirty = true; } }); pValueSlider.setMinimum(0); pValueSlider.setMaximum((int) (pValueMax * 10000)); pValueSlider.setIncrement(1); pValueSlider.setPageIncrement(5); pValueSlider.setSelection((int) (pValue * 10000)); pValueSlider.addMouseListener(new MouseAdapter() { @Override public void mouseUp(MouseEvent e) { pValue = pValueSlider.getSelection() / 10000.00f; pValueInputField.setText(Float.toString(pValue)); isDirty = true; parentComposite.pack(); 
parentComposite.layout(); // if (reducedVA != null) // reducedNumberLabel.setText("# Genes: " + // reducedVA.size()); } }); final Button applyFilterButton = new Button(infoComposite, SWT.PUSH); applyFilterButton.setText("Apply"); applyFilterButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { applyFilter(); } }); Composite histoComposite = new Composite(parentComposite, SWT.NULL); histoComposite.setLayout(new FillLayout(SWT.VERTICAL)); gridData = new GridData(); gridData.heightHint = 300; gridData.widthHint = 500; // gridData.verticalAlignment = GridData.FILL; // gridData2.grabExcessVerticalSpace = true; histoComposite.setLayoutData(gridData); RcpGLHistogramView histogramView = new RcpGLHistogramView(); histogramView.setDataDomain(dataDomain); histogramView.createDefaultSerializedView(); histogramView.createPartControl(histoComposite); ((GLHistogram) (histogramView.getGLView())).setHistogram(histogram); // Usually the canvas is registered to the GL2 animator in the // PartListener. 
// Because the GL2 histogram is no usual RCP view we have to do // it on our // own ViewManager.get() .registerGLCanvasToAnimator(histogramView.getGLCanvas()); } }); addOKCancel(); return true; } public void setHistogram(Histogram histogram) { this.histogram = histogram; } @Override protected void createVADelta() { if (filter instanceof MetaFilter) { for (Filter subFilter : ((MetaFilter) filter).getFilterList()) { createVADelta(subFilter); } } else createVADelta(filter); } private void createVADelta(Filter subFilter) { VirtualArrayDelta recordVADelta = new VirtualArrayDelta(tablePerspective1 .getRecordPerspective().getPerspectiveID(), subFilter.getDataDomain() .getRecordIDType()); VirtualArray recordVA = tablePerspective1.getRecordPerspective() .getVirtualArray(); double[] tTestResult = tablePerspective1.getContainerStatistics().getTTest() .getOneSidedTTestResult();// ((FilterRepresentationPValue) // subFilter.getFilterRep()) for (int recordIndex = 0; recordIndex < recordVA.size(); recordIndex++) { if (tTestResult != null && tTestResult[recordIndex] > pValue) recordVADelta.add(VADeltaItem.removeElement(recordVA.get(recordIndex))); } subFilter.setVADelta(recordVADelta); } @Override protected void triggerRemoveFilterEvent() { RemoveFilterEvent filterEvent = new RemoveFilterEvent(); filterEvent.setEventSpace(filter.getDataDomain().getDataDomainID()); filterEvent.setFilter(filter); EventPublisher.trigger(filterEvent); } @Override protected void applyFilter() { if (isDirty) { createVADelta(); filter.updateFilterManager(); } isDirty = false; } /** * @param dataDomain * setter, see {@link #dataDomain} */ public void setDataDomain(ATableBasedDataDomain dataDomain) { this.dataDomain = dataDomain; } public void setTablePerspective1(TablePerspective tablePerspective1) { this.tablePerspective1 = tablePerspective1; } }
/**
 * OWASP Enterprise Security API (ESAPI)
 *
 * This file is part of the Open Web Application Security Project (OWASP)
 * Enterprise Security API (ESAPI) project. For details, please see
 * <a href="http://www.owasp.org/index.php/ESAPI">http://www.owasp.org/index.php/ESAPI</a>.
 *
 * Copyright (c) 2007 - The OWASP Foundation
 *
 * The ESAPI is published by OWASP under the BSD license. You should read and accept the
 * LICENSE before you use, modify, and/or redistribute this software.
 *
 * @author Jeff Williams <a href="http://www.aspectsecurity.com">Aspect Security</a>
 * @created 2007
 */
package org.owasp.esapi;

import org.owasp.esapi.errors.*;

import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * The HTTPUtilities interface is a collection of methods that provide additional security related to HTTP requests,
 * responses, sessions, cookies, headers, and logging.
 *
 * @author Jeff Williams (jeff.williams .at. aspectsecurity.com) <a href="http://www.aspectsecurity.com">Aspect Security</a>
 * @since June 1, 2007
 */
public interface HTTPUtilities {

    /** Name of the cookie that carries the "remember me" token. */
    final static String REMEMBER_TOKEN_COOKIE_NAME = "rtoken";

    final static int MAX_COOKIE_LEN = 4096; // From RFC 2109
    final static int MAX_COOKIE_PAIRS = 20; // From RFC 2109

    /** Name of the request parameter that carries the CSRF token. */
    final static String CSRF_TOKEN_NAME = "ctoken";

    /** Name of the cookie used by encryptStateInCookie/decryptStateFromCookie. */
    final static String ESAPI_STATE = "estate";

    // Discriminators for the source of an HTTP value.
    final static int PARAMETER = 0;
    final static int HEADER = 1;
    final static int COOKIE = 2;

    /**
     * Calls addCookie with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void addCookie(Cookie cookie);

    /**
     * Add a cookie to the response after ensuring that there are no encoded or
     * illegal characters in the name and value. This method also sets
     * the secure and HttpOnly flags on the cookie.
     *
     * @param cookie
     */
    void addCookie(HttpServletResponse response, Cookie cookie);

    /**
     * Adds the current user's CSRF token (see User.getCSRFToken()) to the URL for purposes of preventing CSRF attacks.
     * This method should be used on all URLs to be put into all links and forms the application generates.
     *
     * @param href the URL to which the CSRF token will be appended
     * @return the updated URL with the CSRF token parameter added
     */
    String addCSRFToken(String href);

    /**
     * Calls addHeader with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void addHeader(String name, String value);

    /**
     * Add a header to the response after ensuring that there are no encoded or
     * illegal characters in the name and value. This implementation
     * follows the following recommendation: "A recipient MAY replace any linear
     * white space with a single SP before interpreting the field value or
     * forwarding the message downstream."
     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
     *
     * @param name
     * @param value
     */
    void addHeader(HttpServletResponse response, String name, String value);

    /**
     * Calls assertSecureRequest with the *current* request.
     * @see {@link HTTPUtilities#assertSecureRequest(HttpServletRequest)}
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void assertSecureRequest() throws AccessControlException;

    /**
     * Calls assertSecureChannel with the *current* request.
     * @see {@link HTTPUtilities#assertSecureChannel(HttpServletRequest)}
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void assertSecureChannel() throws AccessControlException;

    /**
     * Ensures that the request uses both SSL and POST to protect any sensitive parameters
     * in the querystring from being sniffed, logged, bookmarked, included in referer header, etc...
     * This method should be called for any request that contains sensitive data from a web form.
     *
     * @param request
     * @throws AccessControlException if security constraints are not met
     */
    void assertSecureRequest(HttpServletRequest request) throws AccessControlException;

    /**
     * Ensures the use of SSL to protect any sensitive parameters in the request and
     * any sensitive data in the response. This method should be called for any request
     * that contains sensitive data from a web form or will result in sensitive data in the
     * response page.
     *
     * @param request
     * @throws AccessControlException if security constraints are not met
     */
    void assertSecureChannel(HttpServletRequest request) throws AccessControlException;

    /**
     * Calls changeSessionIdentifier with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    HttpSession changeSessionIdentifier() throws AuthenticationException;

    /**
     * Invalidate the existing session after copying all of its contents to a newly created session with a new session id.
     * Note that this is different from logging out and creating a new session identifier that does not contain the
     * existing session contents. Care should be taken to use this only when the existing session does not contain
     * hazardous contents.
     *
     * @param request
     * @return the new HttpSession with a changed id
     * @throws AuthenticationException the exception
     */
    HttpSession changeSessionIdentifier(HttpServletRequest request) throws AuthenticationException;

    /**
     * Clears the current HttpRequest and HttpResponse associated with the current thread.
     *
     * @see ESAPI#clearCurrent()
     */
    void clearCurrent();

    /**
     * Decrypts an encrypted hidden field value and returns the cleartext. If the field does not decrypt properly,
     * an IntrusionException is thrown to indicate tampering.
     *
     * @param encrypted hidden field value to decrypt
     * @return decrypted hidden field value stored as a String
     */
    String decryptHiddenField(String encrypted);

    /**
     * Takes an encrypted querystring and returns a Map containing the original parameters.
     *
     * @param encrypted the encrypted querystring to decrypt
     * @return a Map object containing the decrypted querystring
     * @throws EncryptionException
     */
    Map<String, String> decryptQueryString(String encrypted) throws EncryptionException;

    /**
     * Calls decryptStateFromCookie with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    Map<String, String> decryptStateFromCookie() throws EncryptionException;

    /**
     * Retrieves a map of data from a cookie encrypted with encryptStateInCookie().
     *
     * @param request
     * @return a map containing the decrypted cookie state value
     * @throws EncryptionException
     */
    Map<String, String> decryptStateFromCookie(HttpServletRequest request) throws EncryptionException;

    /**
     * Encrypts a hidden field value for use in HTML.
     *
     * @param value the cleartext value of the hidden field
     * @return the encrypted value of the hidden field
     * @throws EncryptionException
     */
    String encryptHiddenField(String value) throws EncryptionException;

    /**
     * Takes a querystring (everything after the question mark in the URL) and returns an encrypted string containing the parameters.
     *
     * @param query the querystring to encrypt
     * @return encrypted querystring stored as a String
     * @throws EncryptionException
     */
    String encryptQueryString(String query) throws EncryptionException;

    /**
     * Calls encryptStateInCookie with the *current* response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void encryptStateInCookie(Map<String, String> cleartext) throws EncryptionException;

    /**
     * Stores a Map of data in an encrypted cookie. Generally the session is a better
     * place to store state information, as it does not expose it to the user at all.
     * If there is a requirement not to use sessions, or the data should be stored
     * across sessions (for a long time), the use of encrypted cookies is an effective
     * way to prevent the exposure.
     *
     * @param response
     * @param cleartext
     * @throws EncryptionException
     */
    void encryptStateInCookie(HttpServletResponse response, Map<String, String> cleartext) throws EncryptionException;

    /**
     * Calls getCookie with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    String getCookie(String name) throws ValidationException;

    /**
     * A safer replacement for getCookies() in HttpServletRequest that returns the canonicalized
     * value of the named cookie after "global" validation against the
     * general type defined in ESAPI.properties. This should not be considered a replacement for
     * more specific validation.
     *
     * @param request
     * @param name
     * @return the requested cookie value
     */
    String getCookie(HttpServletRequest request, String name) throws ValidationException;

    /**
     * Returns the current user's CSRF token. If there is no current user then return null.
     *
     * @return the current user's CSRF token
     */
    String getCSRFToken();

    /**
     * Retrieves the current HttpServletRequest
     *
     * @return the current request
     */
    HttpServletRequest getCurrentRequest();

    /**
     * Retrieves the current HttpServletResponse
     *
     * @return the current response
     */
    HttpServletResponse getCurrentResponse();

    /**
     * Calls getFileUploads with the *current* request, default upload directory, and default allowed file extensions
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    List getFileUploads() throws ValidationException;

    /**
     * Call getFileUploads with the specified request, default upload directory, and default allowed file extensions
     */
    List getFileUploads(HttpServletRequest request) throws ValidationException;

    /**
     * Call getFileUploads with the specified request, specified upload directory, and default allowed file extensions
     */
    List getFileUploads(HttpServletRequest request, File finalDir) throws ValidationException;

    /**
     * Extract uploaded files from a multipart HTTP request. Implementations must check the content to ensure that it
     * is safe before making a permanent copy on the local filesystem. Checks should include length and content checks,
     * possibly virus checking, and path and name checks. Refer to the file checking methods in Validator for more
     * information.
     *
     * @param request the request to extract uploaded files from
     * @param destinationDir the directory to store the extracted files in
     * @param allowedExtensions the file extensions that are permitted for the uploaded files
     * @return List of new File objects from upload
     * @throws ValidationException if the file fails validation
     */
    List getFileUploads(HttpServletRequest request, File destinationDir, List allowedExtensions) throws ValidationException;

    /**
     * Calls getHeader with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    String getHeader(String name) throws ValidationException;

    /**
     * A safer replacement for getHeader() in HttpServletRequest that returns the canonicalized
     * value of the named header after "global" validation against the
     * general type defined in ESAPI.properties. This should not be considered a replacement for
     * more specific validation.
     *
     * @param request
     * @param name
     * @return the requested header value
     */
    String getHeader(HttpServletRequest request, String name) throws ValidationException;

    /**
     * Calls getParameter with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    String getParameter(String name) throws ValidationException;

    /**
     * A safer replacement for getParameter() in HttpServletRequest that returns the canonicalized
     * value of the named parameter after "global" validation against the
     * general type defined in ESAPI.properties. This should not be considered a replacement for
     * more specific validation.
     *
     * @param request
     * @param name
     * @return the requested parameter value
     */
    String getParameter(HttpServletRequest request, String name) throws ValidationException;

    /**
     * Calls killAllCookies with the *current* request and response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void killAllCookies();

    /**
     * Kill all cookies received in the last request from the browser. Note that new cookies set by the application in
     * this response may not be killed by this method.
     *
     * @param request
     * @param response
     */
    void killAllCookies(HttpServletRequest request, HttpServletResponse response);

    /**
     * Calls killCookie with the *current* request and response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void killCookie(String name);

    /**
     * Kills the specified cookie by setting a new cookie that expires immediately. Note that this
     * method does not delete new cookies that are being set by the application for this response.
     *
     * @param request
     * @param response
     * @param name
     */
    void killCookie(HttpServletRequest request, HttpServletResponse response, String name);

    /**
     * Calls logHTTPRequest with the *current* request and logger.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void logHTTPRequest();

    /**
     * Format the Source IP address, URL, URL parameters, and all form
     * parameters into a string suitable for the log file. Be careful not
     * to log sensitive information, and consider masking with the
     * logHTTPRequest( List parameterNamesToObfuscate ) method.
     *
     * @param request
     * @param logger the logger to write the request to
     */
    void logHTTPRequest(HttpServletRequest request, Logger logger);

    /**
     * Format the Source IP address, URL, URL parameters, and all form
     * parameters into a string suitable for the log file. The list of parameters to
     * obfuscate should be specified in order to prevent sensitive information
     * from being logged. If a null list is provided, then all parameters will
     * be logged. If HTTP request logging is done in a central place, the
     * parameterNamesToObfuscate could be made a configuration parameter. We
     * include it here in case different parts of the application need to obfuscate
     * different parameters.
     *
     * @param request
     * @param logger the logger to write the request to
     * @param parameterNamesToObfuscate the sensitive parameters
     */
    void logHTTPRequest(HttpServletRequest request, Logger logger, List parameterNamesToObfuscate);

    /**
     * Calls sendForward with the *current* request and response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void sendForward(String location) throws AccessControlException, ServletException, IOException;

    /**
     * This method performs a forward to any resource located inside the WEB-INF directory. Forwarding to
     * publicly accessible resources can be dangerous, as the request will have already passed the URL
     * based access control check. This method ensures that you can only forward to non-publicly
     * accessible resources.
     *
     * @param request
     * @param response
     * @param location the URL to forward to, including parameters
     * @throws AccessControlException
     * @throws ServletException
     * @throws IOException
     */
    void sendForward(HttpServletRequest request, HttpServletResponse response, String location) throws AccessControlException, ServletException, IOException;

    /**
     * Calls sendRedirect with the *current* response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void sendRedirect(String location) throws AccessControlException, IOException;

    /**
     * Sends a redirect response to the given location.
     * <p/>
     * NOTE(review): the original javadoc here was copy-pasted from sendForward and described
     * forwarding to WEB-INF resources, which does not apply to a redirect. Implementations
     * presumably restrict redirects to safe locations to prevent open-redirect attacks —
     * confirm against the DefaultHTTPUtilities reference implementation.
     *
     * @param response
     * @param location the URL to redirect to, including parameters
     * @throws AccessControlException
     * @throws IOException
     */
    void sendRedirect(HttpServletResponse response, String location) throws AccessControlException, IOException;

    /**
     * Calls setContentType with the *current* request and response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void setContentType();

    /**
     * Set the content type character encoding header on every HttpServletResponse in order to limit
     * the ways in which the input data can be represented. This prevents
     * malicious users from using encoding and multi-byte escape sequences to
     * bypass input validation routines.
     * <p/>
     * Implementations of this method should set the content type header to a safe value for your environment.
     * The default is text/html; charset=UTF-8 character encoding, which is the default in early
     * versions of HTML and HTTP. See RFC 2045 (http://ds.internic.net/rfc/rfc2045.txt) for more
     * information about character encoding and MIME.
     * <p/>
     * The DefaultHTTPUtilities reference implementation sets the content type as specified.
     *
     * @param response The servlet response to set the content type for.
     */
    void setContentType(HttpServletResponse response);

    /**
     * Stores the current HttpRequest and HttpResponse so that they may be readily accessed throughout
     * ESAPI (and elsewhere)
     *
     * @param request the current request
     * @param response the current response
     */
    void setCurrentHTTP(HttpServletRequest request, HttpServletResponse response);

    /**
     * Calls setHeader with the *current* response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void setHeader(String name, String value);

    /**
     * Add a header to the response after ensuring that there are no encoded or
     * illegal characters in the name and value. "A recipient MAY replace any
     * linear white space with a single SP before interpreting the field value
     * or forwarding the message downstream."
     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
     *
     * @param name
     * @param value
     */
    void setHeader(HttpServletResponse response, String name, String value);

    /**
     * Calls setNoCacheHeaders with the *current* response.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void setNoCacheHeaders();

    /**
     * Set headers to protect sensitive information against being cached in the browser. Developers should make this
     * call for any HTTP responses that contain any sensitive data that should not be cached within the browser or any
     * intermediate proxies or caches. Implementations should set headers for the expected browsers. The safest approach
     * is to set all relevant headers to their most restrictive setting. These include:
     * <p/>
     * <PRE>
     * Cache-Control: no-store<BR>
     * Cache-Control: no-cache<BR>
     * Cache-Control: must-revalidate<BR>
     * Expires: -1<BR>
     * </PRE>
     * <p/>
     * Note that the header "pragma: no-cache" is intended only for use in HTTP requests, not HTTP responses. However, Microsoft has chosen to
     * directly violate the standards, so we need to include that header here. For more information, please refer to the relevant standards:
     * <UL>
     * <LI><a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.1">HTTP/1.1 Cache-Control "no-cache"</a>
     * <LI><a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.2">HTTP/1.1 Cache-Control "no-store"</a>
     * <LI><a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.32">HTTP/1.0 Pragma "no-cache"</a>
     * <LI><a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.21">HTTP/1.0 Expires</a>
     * <LI><a href="http://support.microsoft.com/kb/937479">IE6 Caching Issues</a>
     * <LI><a href="http://support.microsoft.com/kb/234067">Microsoft directly violates specification for pragma: no-cache</a>
     * <LI><a href="https://developer.mozilla.org/en-US/docs/Mozilla/Preferences/Mozilla_networking_preferences#Cache">Firefox browser.cache.disk_cache_ssl</a>
     * <LI><a href="https://developer.mozilla.org/en-US/docs/Mozilla/Preferences/Mozilla_networking_preferences">Mozilla</a>
     * </UL>
     *
     * @param response
     */
    void setNoCacheHeaders(HttpServletResponse response);

    /**
     * Calls setRememberToken with the *current* request and response.
     * (NOTE(review): the original javadoc here incorrectly said "Calls setNoCacheHeaders".)
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    String setRememberToken(String password, int maxAge, String domain, String path);

    /**
     * Set a cookie containing the current User's remember me token for automatic authentication. The use of remember me tokens
     * is generally not recommended, but this method will help do it as safely as possible. The user interface should strongly warn
     * the user that this should only be enabled on computers where no other users will have access.
     * <p/>
     * Implementations should save the user's remember me data in an encrypted cookie and send it to the user.
     * Any old remember me cookie should be destroyed first. Setting this cookie should keep the user
     * logged in until the maxAge passes, the password is changed, or the cookie is deleted.
     * If the cookie exists for the current user, it should automatically be used by ESAPI to
     * log the user in, if the data is valid and not expired.
     * <p/>
     * The ESAPI reference implementation, DefaultHTTPUtilities.setRememberToken() implements all these suggestions.
     * <p/>
     * The username can be retrieved with: User username = ESAPI.authenticator().getCurrentUser();
     *
     * @param request
     * @param password the user's password
     * @param response
     * @param maxAge the length of time that the token should be valid for in relative seconds
     * @param domain the domain to restrict the token to or null
     * @param path the path to restrict the token to or null
     * @return encrypted "Remember Me" token stored as a String
     */
    String setRememberToken(HttpServletRequest request, HttpServletResponse response, String password, int maxAge, String domain, String path);

    /**
     * Calls verifyCSRFToken with the *current* request.
     *
     * @see {@link HTTPUtilities#setCurrentHTTP(HttpServletRequest, HttpServletResponse)}
     */
    void verifyCSRFToken();

    /**
     * Checks the CSRF token in the URL (see User.getCSRFToken()) against the user's CSRF token and
     * throws an IntrusionException if it is missing or incorrect.
     *
     * @param request
     * @throws IntrusionException if CSRF token is missing or incorrect
     */
    void verifyCSRFToken(HttpServletRequest request) throws IntrusionException;

    /**
     * Gets a typed attribute from the session associated with the calling thread. If the
     * object referenced by the passed in key is not of the implied type, a ClassCastException
     * will be thrown to the calling code.
     *
     * @param key
     *          The key that references the session attribute
     * @param <T>
     *          The implied type of object expected.
     * @return
     *          The requested object.
     * @see #getSessionAttribute(javax.servlet.http.HttpSession, String)
     */
    <T> T getSessionAttribute( String key );

    /**
     * Gets a typed attribute from the passed in session. This method has the same
     * responsibility as {@link #getSessionAttribute(String)} however it only references
     * the passed in session and thus performs slightly better since it does not need
     * to return to the Thread to get the {@link HttpSession} associated with the current
     * thread.
     *
     * @param session
     *          The session to retrieve the attribute from
     * @param key
     *          The key that references the requested object
     * @param <T>
     *          The implied type of object expected
     * @return The requested object
     */
    <T> T getSessionAttribute( HttpSession session, String key );

    /**
     * Gets a typed attribute from the {@link HttpServletRequest} associated
     * with the caller thread. If the attribute on the request is not of the implied
     * type, a ClassCastException will be thrown back to the caller.
     *
     * @param key The key that references the request attribute.
     * @param <T> The implied type of the object expected
     * @return The requested object
     */
    <T> T getRequestAttribute( String key );

    /**
     * Gets a typed attribute from the {@link HttpServletRequest} associated
     * with the passed in request. If the attribute on the request is not of the implied
     * type, a ClassCastException will be thrown back to the caller.
     *
     * @param request The request to retrieve the attribute from
     * @param key The key that references the request attribute.
     * @param <T> The implied type of the object expected
     * @return The requested object
     */
    <T> T getRequestAttribute( HttpServletRequest request, String key );
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gobblin.crypto;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.security.KeyStoreException;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import javax.xml.bind.DatatypeConverter;
import lombok.extern.slf4j.Slf4j;

import gobblin.annotation.Alias;
import gobblin.crypto.JCEKSKeystoreCredentialStore;
import gobblin.runtime.cli.CliApplication;


/**
 * Command-line tool for examining and manipulating JCE keystore files: generating AES keys,
 * listing key counts, and exporting keys as base64-encoded JSON. The keystore password is
 * always read interactively from the console (never passed as a flag).
 */
@Alias(value = "keystore", description = "Examine JCE Keystore files")
@Slf4j
public class JCEKSKeystoreCredentialStoreCli implements CliApplication {
  /** Maps the action name given on the command line to its implementation. */
  private static final Map<String, Action> actionMap = ImmutableMap
      .of("generate_keys", new GenerateKeyAction(),
          "list_keys", new ListKeysAction(),
          "help", new HelpAction(),
          "export", new ExportKeyAction());

  /**
   * Entry point. {@code args[0]} is the application alias; {@code args[1]} selects the action;
   * the remaining arguments are action-specific flags.
   */
  @Override
  public void run(String[] args) {
    if (args.length < 2) {
      System.out.println("Must specify an action!");
      new HelpAction().run(args);
      return;
    }

    String actionStr = args[1];
    Action action = actionMap.get(actionStr);
    if (action == null) {
      System.out.println("Action " + actionStr + " unknown!");
      new HelpAction().run(args);
      return;
    }

    // Strip the application alias so each action parses only its own name + flags.
    action.run(Arrays.copyOfRange(args, 1, args.length));
  }

  /**
   * Opens an existing keystore at the given path, prompting for its password on the console.
   *
   * @param path filesystem location of the keystore
   * @throws IOException if the keystore cannot be read
   */
  public static JCEKSKeystoreCredentialStore loadKeystore(String path) throws IOException {
    char[] password = getPasswordFromConsole();
    return new JCEKSKeystoreCredentialStore(path, String.valueOf(password));
  }

  /**
   * Abstract class for any action of this tool
   */
  static abstract class Action {
    /**
     * Return any additional Options for this action. The framework will always add a 'help' option.
     */
    protected abstract List<Option> getExtraOptions();

    /**
     * Execute the action
     * @param args action name followed by its CLI flags
     */
    abstract void run(String[] args);

    protected static final Option HELP = Option.builder("h").longOpt("help").desc("Print usage").build();

    protected void printUsage() {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("Options", getOptions());
    }

    /**
     * Helper function to parse CLI arguments
     */
    protected CommandLine parseOptions(String[] args) throws ParseException {
      CommandLineParser parser = new DefaultParser();
      return parser.parse(getOptions(), args);
    }

    // Combines the always-present help option with the action's extra options.
    private Options getOptions() {
      List<Option> options = getExtraOptions();
      Options optionList = new Options();
      optionList.addOption(HELP);
      for (Option o : options) {
        optionList.addOption(o);
      }

      return optionList;
    }
  }

  /** Prints the list of valid actions. Also used as the fallback when parsing fails. */
  static class HelpAction extends Action {
    @Override
    protected List<Option> getExtraOptions() {
      return Collections.emptyList();
    }

    @Override
    void run(String[] args) {
      System.out.println("You can run <actionName> -h to see valid flags for a given action");
      for (String validAction : actionMap.keySet()) {
        System.out.println(validAction);
      }
    }
  }

  /**
   * Check how many keys are present in an existing keystore.
   */
  static class ListKeysAction extends Action {
    private static final Option KEYSTORE_LOCATION =
        Option.builder("o").longOpt("out").hasArg().desc("Keystore location").build();
    private static final List<Option> options = ImmutableList.of(KEYSTORE_LOCATION);

    @Override
    protected List<Option> getExtraOptions() {
      return options;
    }

    @Override
    void run(String[] args) {
      try {
        CommandLine cli = parseOptions(args);
        if (!paramsAreValid(cli)) {
          return;
        }

        String keystoreLocation = cli.getOptionValue(KEYSTORE_LOCATION.getOpt());
        JCEKSKeystoreCredentialStore credentialStore = loadKeystore(keystoreLocation);
        Map<String, byte[]> keys = credentialStore.getAllEncodedKeys();
        System.out.println("Keystore " + keystoreLocation + " has " + String.valueOf(keys.size()) + " keys.");
      } catch (IOException e) {
        throw new RuntimeException(e);
      } catch (ParseException e) {
        // Bad flags are a user error, not an internal failure: show usage instead of a stack trace
        // (consistent with GenerateKeyAction).
        System.out.println("Unknown command line params " + e.toString());
        printUsage();
      }
    }

    private boolean paramsAreValid(CommandLine cli) {
      if (cli.hasOption(HELP.getOpt())) {
        printUsage();
        return false;
      }

      if (!cli.hasOption(KEYSTORE_LOCATION.getOpt())) {
        System.out.println("Must specify keystore location!");
        printUsage();
        return false;
      }

      return true;
    }
  }

  /**
   * Create a new keystore file with _N_ serialized keys. The password will be read from the console.
   */
  static class GenerateKeyAction extends Action {
    private static final Option KEYSTORE_LOCATION =
        Option.builder("o").longOpt("out").hasArg().desc("Keystore location").build();
    private static final Option NUM_KEYS =
        Option.builder("n").longOpt("numKeys").hasArg().desc("# of keys to generate").build();
    private static final List<Option> OPTIONS = ImmutableList.of(KEYSTORE_LOCATION, NUM_KEYS);

    @Override
    protected List<Option> getExtraOptions() {
      return OPTIONS;
    }

    @Override
    void run(String[] args) {
      try {
        CommandLine cli = parseOptions(args);
        if (!paramsAreValid(cli)) {
          return;
        }

        // Default of 20 keys when -n is not given.
        int numKeys = Integer.parseInt(cli.getOptionValue(NUM_KEYS.getOpt(), "20"));
        char[] password = getPasswordFromConsole();
        String keystoreLocation = cli.getOptionValue(KEYSTORE_LOCATION.getOpt());

        JCEKSKeystoreCredentialStore credentialStore =
            new JCEKSKeystoreCredentialStore(keystoreLocation, String.valueOf(password),
                EnumSet.of(JCEKSKeystoreCredentialStore.CreationOptions.CREATE_IF_MISSING));
        credentialStore.generateAesKeys(numKeys, 0);

        System.out.println("Generated " + String.valueOf(numKeys) + " keys at " + keystoreLocation);
      } catch (IOException | KeyStoreException e) {
        throw new RuntimeException(e);
      } catch (ParseException e) {
        System.out.println("Unknown command line params " + e.toString());
        printUsage();
      }
    }

    private boolean paramsAreValid(CommandLine cli) {
      if (cli.hasOption(HELP.getOpt())) {
        printUsage();
        return false;
      }

      if (!cli.hasOption(KEYSTORE_LOCATION.getOpt())) {
        System.out.println("Must specify keystore location!");
        printUsage();
        return false;
      }

      return true;
    }
  }

  /** Reads the keystore password interactively; requires an attached console. */
  public static char[] getPasswordFromConsole() {
    System.out.print("Please enter the keystore password: ");
    return System.console().readPassword();
  }

  /**
   * Exports all keys in a keystore as a JSON map of {keyId: base64(encodedKey)} written to a file.
   */
  static class ExportKeyAction extends Action {
    private static final Option KEYSTORE_LOCATION =
        Option.builder("i").longOpt("in").hasArg().required().desc("Keystore location").build();
    private static final Option OUTPUT_LOCATION =
        Option.builder("o").longOpt("out").hasArg().required().desc("Output location").build();

    @Override
    protected List<Option> getExtraOptions() {
      return ImmutableList.of(KEYSTORE_LOCATION, OUTPUT_LOCATION);
    }

    @Override
    void run(String[] args) {
      try {
        CommandLine cli = parseOptions(args);
        JCEKSKeystoreCredentialStore credStore = loadKeystore(cli.getOptionValue(KEYSTORE_LOCATION.getOpt()));

        // Re-key by integer id and base64-encode the raw key bytes for JSON serialization.
        Map<Integer, String> base64Keys = new HashMap<>();
        Map<String, byte[]> keys = credStore.getAllEncodedKeys();
        for (Map.Entry<String, byte[]> e : keys.entrySet()) {
          base64Keys.put(Integer.valueOf(e.getKey()), DatatypeConverter.printBase64Binary(e.getValue()));
        }

        // try-with-resources guarantees the writer is flushed and closed even if write() throws.
        try (OutputStreamWriter fOs = new OutputStreamWriter(
            new FileOutputStream(new File(cli.getOptionValue(OUTPUT_LOCATION.getOpt()))),
            StandardCharsets.UTF_8)) {
          Gson gson = new GsonBuilder().disableHtmlEscaping().create();
          fOs.write(gson.toJson(base64Keys));
          fOs.flush();
        }
      } catch (ParseException e) {
        printUsage();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query.lookup;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.jackson.JacksonUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.MapLookupExtractor;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Map;

/**
 * Tests for {@link RegisteredLookupExtractionFn}: delegation to the lookup registered under a
 * name in a {@link LookupReferencesManager}, injectivity inheritance, error handling when the
 * lookup is missing, JSON round-tripping, and equals().
 */
public class RegisteredLookupExtractionFnTest
{
  // Backing data for the mock lookup. Declared final: this is a constant shared by every test.
  private static final Map<String, String> MAP = ImmutableMap.of(
      "foo", "bar",
      "bat", "baz"
  );
  private static final LookupExtractor LOOKUP_EXTRACTOR = new MapLookupExtractor(MAP, true);
  private static final String LOOKUP_NAME = "some lookup";

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @Test
  public void testSimpleDelegation()
  {
    final LookupReferencesManager manager = EasyMock.createStrictMock(LookupReferencesManager.class);
    managerReturnsMap(manager);
    EasyMock.replay(manager);
    final RegisteredLookupExtractionFn fn = new RegisteredLookupExtractionFn(
        manager,
        LOOKUP_NAME,
        true,
        null,
        false,
        false
    );
    EasyMock.verify(manager);

    // injective=false was passed explicitly, so it must NOT be inherited from the lookup.
    Assert.assertEquals(false, fn.isInjective());
    Assert.assertEquals(ExtractionFn.ExtractionType.MANY_TO_ONE, fn.getExtractionType());
    for (String orig : Arrays.asList("", "foo", "bat")) {
      Assert.assertEquals(LOOKUP_EXTRACTOR.apply(orig), fn.apply(orig));
    }
    // retainMissingValue=true: unmapped keys pass through unchanged.
    Assert.assertEquals("not in the map", fn.apply("not in the map"));
  }

  @Test
  public void testInheritInjective()
  {
    final LookupReferencesManager manager = EasyMock.createStrictMock(LookupReferencesManager.class);
    managerReturnsMap(manager);
    EasyMock.replay(manager);
    final RegisteredLookupExtractionFn fn = new RegisteredLookupExtractionFn(
        manager,
        LOOKUP_NAME,
        true,
        null,
        null,
        false
    );
    EasyMock.verify(manager);

    // injective=null means "inherit": the underlying MapLookupExtractor is injective.
    Assert.assertNull(fn.isInjective());
    Assert.assertEquals(ExtractionFn.ExtractionType.ONE_TO_ONE, fn.getExtractionType());
  }

  @Test
  public void testMissingDelegation()
  {
    final LookupReferencesManager manager = EasyMock.createStrictMock(LookupReferencesManager.class);
    EasyMock.expect(manager.get(EasyMock.eq(LOOKUP_NAME))).andReturn(null).once();
    EasyMock.replay(manager);

    expectedException.expectMessage("Lookup [some lookup] not found");
    try {
      new RegisteredLookupExtractionFn(
          manager,
          LOOKUP_NAME,
          true,
          null,
          true,
          false
      ).apply("foo");
    }
    finally {
      EasyMock.verify(manager);
    }
  }

  @Test
  public void testNullLookup()
  {
    expectedException.expectMessage("`lookup` required");
    new RegisteredLookupExtractionFn(
        null,
        null,
        true,
        null,
        true,
        false
    );
  }

  @Test
  public void testSerDe() throws Exception
  {
    final ObjectMapper mapper = new DefaultObjectMapper();
    final LookupReferencesManager manager = EasyMock.createStrictMock(LookupReferencesManager.class);
    managerReturnsMap(manager);
    EasyMock.replay(manager);
    final RegisteredLookupExtractionFn fn = new RegisteredLookupExtractionFn(
        manager,
        LOOKUP_NAME,
        true,
        null,
        true,
        false
    );
    EasyMock.verify(manager);

    // Round trip through JSON and compare the generic-map forms of both sides.
    final Map<String, Object> result = mapper.readValue(
        mapper.writeValueAsString(fn),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    Assert.assertEquals(mapper.convertValue(fn, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT), result);
    Assert.assertEquals(LOOKUP_NAME, result.get("lookup"));
    Assert.assertEquals(true, result.get("retainMissingValue"));
    Assert.assertEquals(true, result.get("injective"));
    Assert.assertNull(result.get("replaceMissingValueWith"));
    Assert.assertEquals(false, result.get("optimize"));
  }

  @Test
  public void testEquals()
  {
    final LookupReferencesManager manager = EasyMock.createStrictMock(LookupReferencesManager.class);
    managerReturnsMap(manager);
    EasyMock.replay(manager);
    final RegisteredLookupExtractionFn fn = new RegisteredLookupExtractionFn(
        manager,
        LOOKUP_NAME,
        false,
        "something",
        true,
        false
    );

    // Identical construction parameters => equal.
    Assert.assertEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            false,
            "something",
            true,
            false
        )
    );

    // Each subsequent case varies exactly one parameter => not equal.
    Assert.assertNotEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            true,
            null,
            true,
            false
        )
    );

    Assert.assertNotEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            false,
            "something else",
            true,
            false
        )
    );

    Assert.assertNotEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            false,
            "something",
            false,
            false
        )
    );

    Assert.assertNotEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            false,
            "something",
            true,
            true
        )
    );

    Assert.assertNotEquals(
        fn,
        new RegisteredLookupExtractionFn(
            manager,
            LOOKUP_NAME,
            false,
            null,
            true,
            false
        )
    );

    EasyMock.verify(manager);
  }

  /**
   * Stubs the manager to return a container wrapping {@link #LOOKUP_EXTRACTOR} for
   * {@link #LOOKUP_NAME}, any number of times.
   */
  private void managerReturnsMap(LookupReferencesManager manager)
  {
    EasyMock.expect(manager.get(EasyMock.eq(LOOKUP_NAME))).andReturn(
        new LookupExtractorFactoryContainer(
            "v0",
            new LookupExtractorFactory()
            {
              @Override
              public boolean start()
              {
                return false;
              }

              @Override
              public boolean replaces(@Nullable LookupExtractorFactory other)
              {
                return false;
              }

              @Override
              public boolean close()
              {
                return false;
              }

              @Nullable
              @Override
              public LookupIntrospectHandler getIntrospectHandler()
              {
                return null;
              }

              @Override
              public LookupExtractor get()
              {
                return LOOKUP_EXTRACTOR;
              }
            }
        )
    ).anyTimes();
  }
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.native_page;

import android.view.View;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;

import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.native_page.NativePageFactory.NativePageType;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.ui.native_page.NativePage;
import org.chromium.components.embedder_support.util.UrlConstants;

/**
 * Tests public methods in NativePageFactory.
 *
 * Table-driven: VALID_URLS pairs each native-page URL with the page type the factory must
 * produce, and INVALID_URLS lists near-miss strings that must never be treated as native pages.
 */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class NativePageFactoryTest {
    private NativePageFactory mNativePageFactory;

    /**
     * Minimal NativePage stand-in that records its declared type and counts updateForUrl()
     * calls so tests can verify creation vs. reuse of candidate pages.
     */
    private static class MockNativePage implements NativePage {
        public final @NativePageType int type;
        // Incremented on every updateForUrl(); tests assert it is exactly 1 per created page.
        public int updateForUrlCalls;

        public MockNativePage(@NativePageType int type) {
            this.type = type;
        }

        @Override
        public void updateForUrl(String url) {
            updateForUrlCalls++;
        }

        @Override
        public String getUrl() {
            return null;
        }

        @Override
        public String getHost() {
            // Host must match the type so the factory can identify a reusable candidate page.
            switch (type) {
                case NativePageType.NTP:
                    return UrlConstants.NTP_HOST;
                case NativePageType.BOOKMARKS:
                    return UrlConstants.BOOKMARKS_HOST;
                case NativePageType.RECENT_TABS:
                    return UrlConstants.RECENT_TABS_HOST;
                case NativePageType.HISTORY:
                    return UrlConstants.HISTORY_HOST;
                default:
                    Assert.fail("Unexpected NativePageType: " + type);
                    return null;
            }
        }

        @Override
        public void destroy() {}

        @Override
        public String getTitle() {
            return null;
        }

        @Override
        public int getBackgroundColor() {
            return 0;
        }

        @Override
        public boolean needsToolbarShadow() {
            return true;
        }

        @Override
        public View getView() {
            return null;
        }
    }

    /** Builder that returns MockNativePages instead of constructing real (Android-backed) pages. */
    private static class MockNativePageBuilder extends NativePageFactory.NativePageBuilder {
        private MockNativePageBuilder() {
            super(null, null);
        }

        @Override
        public NativePage buildNewTabPage(Tab tab) {
            return new MockNativePage(NativePageType.NTP);
        }

        @Override
        public NativePage buildBookmarksPage(Tab tab) {
            return new MockNativePage(NativePageType.BOOKMARKS);
        }

        @Override
        public NativePage buildRecentTabsPage(Tab tab) {
            return new MockNativePage(NativePageType.RECENT_TABS);
        }

        @Override
        public NativePage buildHistoryPage(Tab tab) {
            return new MockNativePage(NativePageType.HISTORY);
        }
    }

    /** A URL paired with the NativePageType the factory is expected to create for it. */
    private static class UrlCombo {
        public String url;
        public @NativePageType int expectedType;

        public UrlCombo(String url, @NativePageType int expectedType) {
            this.url = url;
            this.expectedType = expectedType;
        }
    }

    // URLs that must resolve to a native page; includes trailing-slash and fragment variants.
    private static final UrlCombo[] VALID_URLS = {
            new UrlCombo("chrome-native://newtab", NativePageType.NTP),
            new UrlCombo("chrome-native://newtab/", NativePageType.NTP),
            new UrlCombo("chrome-native://bookmarks", NativePageType.BOOKMARKS),
            new UrlCombo("chrome-native://bookmarks/", NativePageType.BOOKMARKS),
            new UrlCombo("chrome-native://bookmarks/#245", NativePageType.BOOKMARKS),
            new UrlCombo("chrome-native://recent-tabs", NativePageType.RECENT_TABS),
            new UrlCombo("chrome-native://recent-tabs/", NativePageType.RECENT_TABS),
            new UrlCombo("chrome://history/", NativePageType.HISTORY)};

    // Near-miss strings: wrong scheme, wrong host, prefix/suffix variations, null and empty.
    private static final String[] INVALID_URLS = {
            null,
            "",
            "newtab",
            "newtab@google.com:80",
            "/newtab",
            "://newtab",
            "chrome://",
            "chrome://most_visited",
            "chrome-native://",
            "chrome-native://newtablet",
            "chrome-native://bookmarks-inc",
            "chrome-native://recent_tabs",
            "chrome-native://recent-tabswitcher",
            "chrome-native://most_visited",
            "chrome-native://astronaut",
            "chrome-internal://newtab",
            "french-fries://newtab",
            "http://bookmarks",
            "https://recent-tabs",
            "newtab://recent-tabs",
            "recent-tabs bookmarks",
    };

    // Recent-tabs is the only native page unavailable in incognito mode.
    private boolean isValidInIncognito(UrlCombo urlCombo) {
        return urlCombo.expectedType != NativePageType.RECENT_TABS;
    }

    @Before
    public void setUp() {
        mNativePageFactory = new NativePageFactory(null);
        mNativePageFactory.setNativePageBuilderForTesting(new MockNativePageBuilder());
    }

    /**
     * Ensures that NativePageFactory.isNativePageUrl() returns true for native page URLs.
     */
    @Test
    public void testPositiveIsNativePageUrl() {
        for (UrlCombo urlCombo : VALID_URLS) {
            String url = urlCombo.url;
            Assert.assertTrue(url, NativePageFactory.isNativePageUrl(url, false));
            if (isValidInIncognito(urlCombo)) {
                Assert.assertTrue(url, NativePageFactory.isNativePageUrl(url, true));
            }
        }
    }

    /**
     * Ensures that NativePageFactory.isNativePageUrl() returns false for URLs that don't
     * correspond to a native page.
     */
    @Test
    public void testNegativeIsNativePageUrl() {
        for (String invalidUrl : INVALID_URLS) {
            Assert.assertFalse(invalidUrl, NativePageFactory.isNativePageUrl(invalidUrl, false));
            Assert.assertFalse(invalidUrl, NativePageFactory.isNativePageUrl(invalidUrl, true));
        }
    }

    /**
     * Ensures that NativePageFactory.createNativePageForURL() returns a native page of the right
     * type and reuses the candidate page if it's the right type.
     */
    @Test
    public void testCreateNativePage() {
        @NativePageType
        int[] candidateTypes = new int[] {NativePageType.NONE, NativePageType.NTP,
                NativePageType.BOOKMARKS, NativePageType.RECENT_TABS, NativePageType.HISTORY};
        // Exercise the full cross product: incognito x valid URL x candidate page type.
        for (boolean isIncognito : new boolean[] {true, false}) {
            for (UrlCombo urlCombo : VALID_URLS) {
                if (isIncognito && !isValidInIncognito(urlCombo)) continue;
                for (@NativePageType int candidateType : candidateTypes) {
                    MockNativePage candidate = candidateType == NativePageType.NONE
                            ? null
                            : new MockNativePage(candidateType);
                    MockNativePage page = (MockNativePage) mNativePageFactory.createNativePageForURL(
                            urlCombo.url, candidate, null, isIncognito);
                    String debugMessage = String.format(
                            "Failed test case: isIncognito=%s, urlCombo={%s,%s}, candidateType=%s",
                            isIncognito, urlCombo.url, urlCombo.expectedType, candidateType);
                    Assert.assertNotNull(debugMessage, page);
                    Assert.assertEquals(debugMessage, 1, page.updateForUrlCalls);
                    Assert.assertEquals(debugMessage, urlCombo.expectedType, page.type);
                    // Matching candidate must be reused; mismatched candidate must be replaced.
                    if (candidateType == urlCombo.expectedType) {
                        Assert.assertSame(debugMessage, candidate, page);
                    } else {
                        Assert.assertNotSame(debugMessage, candidate, page);
                    }
                }
            }
        }
    }

    /**
     * Ensures that NativePageFactory.createNativePageForURL() returns null for URLs that don't
     * correspond to a native page.
     */
    @Test
    public void testCreateNativePageWithInvalidUrl() {
        // Recent-tabs URLs are valid in general but must yield null in incognito.
        for (UrlCombo urlCombo : VALID_URLS) {
            if (!isValidInIncognito(urlCombo)) {
                Assert.assertNull(urlCombo.url,
                        mNativePageFactory.createNativePageForURL(urlCombo.url, null, null, true));
            }
        }
        for (boolean isIncognito : new boolean[] {true, false}) {
            for (String invalidUrl : INVALID_URLS) {
                Assert.assertNull(invalidUrl, mNativePageFactory.createNativePageForURL(
                        invalidUrl, null, null, isIncognito));
            }
        }
    }
}
/** * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.bitcoin.core; import java.math.BigInteger; import java.util.*; import com.google.bitcoin.store.BlockStore; import com.google.bitcoin.store.BlockStoreException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A BlockChain holds a series of {@link Block} objects, links them together, and knows how to verify that the * chain follows the rules of the {@link NetworkParameters} for this chain.<p> * * A BlockChain requires a {@link Wallet} to receive transactions that it finds during the initial download. However, * if you don't care about this, you can just pass in an empty wallet and nothing bad will happen.<p> * * A newly constructed BlockChain is empty. To fill it up, use a {@link Peer} object to download the chain from the * network.<p> * * <b>Notes</b><p> * * The 'chain' can actually be a tree although in normal operation it can be thought of as a simple list. In such a * situation there are multiple stories of the economy competing to become the one true consensus. This can happen * naturally when two miners solve a block within a few seconds of each other, or it can happen when the chain is * under attack.<p> * * A reference to the head block of every chain is stored. If you can reach the genesis block by repeatedly walking * through the prevBlock pointers, then we say this is a full chain. 
If you cannot reach the genesis block we say it is * an orphan chain.<p> * * Orphan chains can occur when blocks are solved and received during the initial block chain download, * or if we connect to a peer that doesn't send us blocks in order. */ public class BlockChain { private static final Logger log = LoggerFactory.getLogger(BlockChain.class); /** Keeps a map of block hashes to StoredBlocks. */ protected BlockStore blockStore; /** * Tracks the top of the best known chain.<p> * * Following this one down to the genesis block produces the story of the economy from the creation of BitCoin * until the present day. The chain head can change if a new set of blocks is received that results in a chain of * greater work than the one obtained by following this one down. In that case a reorganize is triggered, * potentially invalidating transactions in our wallet. */ protected StoredBlock chainHead; protected final NetworkParameters params; protected final List<Wallet> wallets; // Holds blocks that we have received but can't plug into the chain yet, eg because they were created whilst we // were downloading the block chain. private final ArrayList<Block> unconnectedBlocks = new ArrayList<Block>(); /** * Constructs a BlockChain connected to the given wallet and store. To obtain a {@link Wallet} you can construct * one from scratch, or you can deserialize a saved wallet from disk using {@link Wallet#loadFromFile(java.io.File)} * <p> * * For the store you can use a {@link com.google.bitcoin.store.MemoryBlockStore} if you don't care about saving the downloaded data, or a * {@link com.google.bitcoin.store.BoundedOverheadBlockStore} if you'd like to ensure fast startup the next time you run the program. */ public BlockChain(NetworkParameters params, Wallet wallet, BlockStore blockStore) throws BlockStoreException { this(params, new ArrayList<Wallet>(), blockStore); if (wallet != null) addWallet(wallet); } /** * Constructs a BlockChain that has no wallet at all. 
This is helpful when you don't actually care about sending * and receiving coins but rather, just want to explore the network data structures. */ public BlockChain(NetworkParameters params, BlockStore blockStore) throws BlockStoreException { this(params, new ArrayList<Wallet>(), blockStore); } /** * Constructs a BlockChain connected to the given list of wallets and a store. */ public BlockChain(NetworkParameters params, List<Wallet> wallets, BlockStore blockStore) throws BlockStoreException { this.blockStore = blockStore; chainHead = blockStore.getChainHead(); log.info("chain head is:\n{}", chainHead.getHeader()); this.params = params; this.wallets = new ArrayList<Wallet>(wallets); } /** * Add a wallet to the BlockChain. Note that the wallet will be unaffected by any blocks received while it * was not part of this BlockChain. This method is useful if the wallet has just been created, and its keys * have never been in use, or if the wallet has been loaded along with the BlockChain */ public synchronized void addWallet(Wallet wallet) { wallets.add(wallet); } /** * Processes a received block and tries to add it to the chain. If there's something wrong with the block an * exception is thrown. If the block is OK but cannot be connected to the chain at this time, returns false. * If the block can be connected to the chain, returns true. */ public synchronized boolean add(Block block) throws VerificationException, ScriptException { try { return add(block, true); } catch (BlockStoreException e) { // TODO: Figure out a better way to propagate this exception to the user. throw new RuntimeException(e); } } // Stat counters. private long statsLastTime = System.currentTimeMillis(); private long statsBlocksAdded; private synchronized boolean add(Block block, boolean tryConnecting) throws BlockStoreException, VerificationException, ScriptException { if (System.currentTimeMillis() - statsLastTime > 1000) { // More than a second passed since last stats logging. 
log.info("{} blocks per second", statsBlocksAdded); statsLastTime = System.currentTimeMillis(); statsBlocksAdded = 0; } // We check only the chain head for double adds here to avoid potentially expensive block chain misses. if (block.equals(chainHead.getHeader())) { // Duplicate add of the block at the top of the chain, can be a natural artifact of the download process. return true; } // Does this block contain any transactions we might care about? Check this up front before verifying the // blocks validity so we can skip the merkle root verification if the contents aren't interesting. This saves // a lot of time for big blocks. boolean contentsImportant = false; HashMap<Wallet, List<Transaction>> walletToTxMap = new HashMap<Wallet, List<Transaction>>();; if (block.transactions != null) { scanTransactions(block, walletToTxMap); contentsImportant = walletToTxMap.size() > 0; } // Prove the block is internally valid: hash is lower than target, etc. This only checks the block contents // if there is a tx sending or receiving coins using an address in one of our wallets. And those transactions // are only lightly verified: presence in a valid connecting block is taken as proof of validity. See the // article here for more details: http://code.google.com/p/bitcoinj/wiki/SecurityModel try { block.verifyHeader(); if (contentsImportant) block.verifyTransactions(); } catch (VerificationException e) { log.error("Failed to verify block: ", e); log.error(block.getHashAsString()); throw e; } // Try linking it to a place in the currently known blocks. StoredBlock storedPrev = blockStore.get(block.getPrevBlockHash()); if (storedPrev == null) { // We can't find the previous block. Probably we are still in the process of downloading the chain and a // block was solved whilst we were doing it. We put it to one side and try to connect it later when we // have more blocks. 
log.warn("Block does not connect: {}", block.getHashAsString()); unconnectedBlocks.add(block); return false; } else { // It connects to somewhere on the chain. Not necessarily the top of the best known chain. // // Create a new StoredBlock from this block. It will throw away the transaction data so when block goes // out of scope we will reclaim the used memory. StoredBlock newStoredBlock = storedPrev.build(block); checkDifficultyTransitions(storedPrev, newStoredBlock); blockStore.put(newStoredBlock); connectBlock(newStoredBlock, storedPrev, walletToTxMap); } if (tryConnecting) tryConnectingUnconnected(); statsBlocksAdded++; return true; } private void connectBlock(StoredBlock newStoredBlock, StoredBlock storedPrev, HashMap<Wallet, List<Transaction>> newTransactions) throws BlockStoreException, VerificationException { if (storedPrev.equals(chainHead)) { // This block connects to the best known block, it is a normal continuation of the system. setChainHead(newStoredBlock); log.debug("Chain is now {} blocks high", chainHead.getHeight()); if (newTransactions != null) sendTransactionsToWallet(newStoredBlock, NewBlockType.BEST_CHAIN, newTransactions); } else { // This block connects to somewhere other than the top of the best known chain. We treat these differently. // // Note that we send the transactions to the wallet FIRST, even if we're about to re-organize this block // to become the new best chain head. This simplifies handling of the re-org in the Wallet class. boolean haveNewBestChain = newStoredBlock.moreWorkThan(chainHead); if (haveNewBestChain) { log.info("Block is causing a re-organize"); } else { StoredBlock splitPoint = findSplit(newStoredBlock, chainHead); String splitPointHash = splitPoint != null ? splitPoint.getHeader().getHashAsString() : "?"; log.info("Block forks the chain at {}, but it did not cause a reorganize:\n{}", splitPointHash, newStoredBlock); } // We may not have any transactions if we received only a header. 
That never happens today but will in // future when getheaders is used as an optimization. if (newTransactions != null) { sendTransactionsToWallet(newStoredBlock, NewBlockType.SIDE_CHAIN, newTransactions); } if (haveNewBestChain) handleNewBestChain(newStoredBlock); } } /** * Called as part of connecting a block when the new block results in a different chain having higher total work. */ private void handleNewBestChain(StoredBlock newChainHead) throws BlockStoreException, VerificationException { // This chain has overtaken the one we currently believe is best. Reorganize is required. // // Firstly, calculate the block at which the chain diverged. We only need to examine the // chain from beyond this block to find differences. StoredBlock splitPoint = findSplit(newChainHead, chainHead); log.info("Re-organize after split at height {}", splitPoint.getHeight()); log.info("Old chain head: {}", chainHead.getHeader().getHashAsString()); log.info("New chain head: {}", newChainHead.getHeader().getHashAsString()); log.info("Split at block: {}", splitPoint.getHeader().getHashAsString()); // Then build a list of all blocks in the old part of the chain and the new part. List<StoredBlock> oldBlocks = getPartialChain(chainHead, splitPoint); List<StoredBlock> newBlocks = getPartialChain(newChainHead, splitPoint); // Now inform the wallets. This is necessary so the set of currently active transactions (that we can spend) // can be updated to take into account the re-organize. We might also have received new coins we didn't have // before and our previous spends might have been undone. for (Wallet wallet : wallets) { wallet.reorganize(oldBlocks, newBlocks); } // Update the pointer to the best known block. setChainHead(newChainHead); } /** * Returns the set of contiguous blocks between 'higher' and 'lower'. Higher is included, lower is not. 
 */
    private List<StoredBlock> getPartialChain(StoredBlock higher, StoredBlock lower) throws BlockStoreException {
        // Precondition: "higher" must be strictly above "lower", otherwise the backwards walk
        // below would never reach it and the assertion inside the loop would fire.
        assert higher.getHeight() > lower.getHeight();
        LinkedList<StoredBlock> results = new LinkedList<StoredBlock>();
        StoredBlock cursor = higher;
        while (true) {
            results.add(cursor);
            cursor = cursor.getPrev(blockStore);
            // getPrev returning null means the parent is missing from the store; that should be
            // impossible for a block already connected to the chain.
            assert cursor != null : "Ran off the end of the chain";
            // Note: the returned list includes "higher" but excludes "lower" itself.
            if (cursor.equals(lower)) break;
        }
        return results;
    }

    /**
     * Locates the point in the chain at which newChainHead and chainHead diverge, i.e. their
     * deepest common ancestor. Example:
     *
     *     A -> B -> C -> D
     *          \--> E -> F -> G
     *
     * findSplit will return block B when chainHead = D and newChainHead = G.
     * If either chain turns out to be an orphan (a missing parent is encountered) an assertion
     * fails rather than returning null; callers are expected to only pass connected blocks.
     */
    private StoredBlock findSplit(StoredBlock newChainHead, StoredBlock chainHead) throws BlockStoreException {
        StoredBlock currentChainCursor = chainHead;
        StoredBlock newChainCursor = newChainHead;
        // Loop until we find the block both chains have in common. Always step back the cursor
        // that is currently higher, so both cursors meet at the same height and eventually at
        // the same block.
        while (!currentChainCursor.equals(newChainCursor)) {
            if (currentChainCursor.getHeight() > newChainCursor.getHeight()) {
                currentChainCursor = currentChainCursor.getPrev(blockStore);
                assert currentChainCursor != null : "Attempt to follow an orphan chain";
            } else {
                newChainCursor = newChainCursor.getPrev(blockStore);
                assert newChainCursor != null : "Attempt to follow an orphan chain";
            }
        }
        return currentChainCursor;
    }

    /** Whether a newly connected block extended the best known chain or a side chain. */
    enum NewBlockType {
        BEST_CHAIN,
        SIDE_CHAIN
    }

    /**
     * Delivers each wallet's relevant transactions (as previously computed by scanTransactions)
     * for the given block. Script parse failures are logged and skipped rather than propagated,
     * so one unparseable transaction cannot stall chain processing.
     */
    private void sendTransactionsToWallet(StoredBlock block, NewBlockType blockType,
            HashMap<Wallet, List<Transaction>> newTransactions) throws VerificationException {
        for (Map.Entry<Wallet, List<Transaction>> entry : newTransactions.entrySet()) {
            try {
                List<Transaction> txns = entry.getValue();
                for (Transaction tx : txns) {
                    entry.getKey().receive(tx, block, blockType);
                }
            } catch (ScriptException e) {
                // We don't want scripts we don't understand to break the block chain so just note that this tx was
                // not scanned here and continue.
                log.warn("Failed to parse a script: " + e.toString());
            }
        }
    }

    /**
     * Persists the new chain head to the block store and mirrors it into the in-memory field.
     * Both must stay in sync: the store is the durable source of truth, the field the fast path.
     */
    private void setChainHead(StoredBlock chainHead) throws BlockStoreException {
        blockStore.setChainHead(chainHead);
        this.chainHead = chainHead;
    }

    /**
     * For each block in unconnectedBlocks, see if we can now fit it on top of the chain and if so, do so.
     */
    private void tryConnectingUnconnected() throws VerificationException, ScriptException, BlockStoreException {
        // For each block in our unconnected list, try and fit it onto the head of the chain. If we succeed remove it
        // from the list and keep going. If we changed the head of the list at the end of the round try again until
        // we can't fit anything else on the top.
        int blocksConnectedThisRound;
        do {
            blocksConnectedThisRound = 0;
            Iterator<Block> iter = unconnectedBlocks.iterator();
            while (iter.hasNext()) {
                Block block = iter.next();
                // Look up the blocks previous.
                StoredBlock prev = blockStore.get(block.getPrevBlockHash());
                if (prev == null) {
                    // This is still an unconnected/orphan block.
                    continue;
                }
                // Otherwise we can connect it now.
                // False here ensures we don't recurse infinitely downwards when connecting huge chains.
                add(block, false);
                iter.remove();
                blocksConnectedThisRound++;
            }
            if (blocksConnectedThisRound > 0) {
                log.info("Connected {} floating blocks.", blocksConnectedThisRound);
            }
        } while (blocksConnectedThisRound > 0);
    }

    /**
     * Throws an exception if the blocks difficulty is not correct.
     * Between retarget points the difficulty must not change at all; at a retarget point
     * (every params.interval blocks) it must equal the value recomputed from the time the
     * previous interval actually took, clamped to a factor of 4 in either direction.
     */
    private void checkDifficultyTransitions(StoredBlock storedPrev, StoredBlock storedNext)
            throws BlockStoreException, VerificationException {
        Block prev = storedPrev.getHeader();
        Block next = storedNext.getHeader();
        // Is this supposed to be a difficulty transition point?
        if ((storedPrev.getHeight() + 1) % params.interval != 0) {
            // No ... so check the difficulty didn't actually change.
            if (next.getDifficultyTarget() != prev.getDifficultyTarget())
                throw new VerificationException("Unexpected change in difficulty at height " + storedPrev.getHeight() +
                        ": " + Long.toHexString(next.getDifficultyTarget()) + " vs " +
                        Long.toHexString(prev.getDifficultyTarget()));
            return;
        }
        // We need to find a block far back in the chain. It's OK that this is expensive because it only occurs every
        // two weeks after the initial block chain download.
        long now = System.currentTimeMillis();
        StoredBlock cursor = blockStore.get(prev.getHash());
        for (int i = 0; i < params.interval - 1; i++) {
            if (cursor == null) {
                // This should never happen. If it does, it means we are following an incorrect or busted chain.
                throw new VerificationException(
                        "Difficulty transition point but we did not find a way back to the genesis block.");
            }
            cursor = blockStore.get(cursor.getHeader().getPrevBlockHash());
        }
        log.debug("Difficulty transition traversal took {}msec", System.currentTimeMillis() - now);
        Block blockIntervalAgo = cursor.getHeader();
        int timespan = (int) (prev.getTimeSeconds() - blockIntervalAgo.getTimeSeconds());
        // Limit the adjustment step.
        if (timespan < params.targetTimespan / 4)
            timespan = params.targetTimespan / 4;
        if (timespan > params.targetTimespan * 4)
            timespan = params.targetTimespan * 4;
        // New target = old target * actualTimespan / targetTimespan, done in BigInteger because
        // the target is a 256-bit quantity encoded in compact ("nBits") form.
        BigInteger newDifficulty = Utils.decodeCompactBits(blockIntervalAgo.getDifficultyTarget());
        newDifficulty = newDifficulty.multiply(BigInteger.valueOf(timespan));
        newDifficulty = newDifficulty.divide(BigInteger.valueOf(params.targetTimespan));
        if (newDifficulty.compareTo(params.proofOfWorkLimit) > 0) {
            log.debug("Difficulty hit proof of work limit: {}", newDifficulty.toString(16));
            newDifficulty = params.proofOfWorkLimit;
        }
        // The compact encoding only keeps 3 bytes of mantissa; accuracyBytes is how many whole
        // bytes below the mantissa the received target discards.
        int accuracyBytes = (int) (next.getDifficultyTarget() >>> 24) - 3;
        BigInteger receivedDifficulty = next.getDifficultyTargetAsInteger();
        // The calculated difficulty is to a higher precision than received, so reduce here.
        BigInteger mask = BigInteger.valueOf(0xFFFFFFL).shiftLeft(accuracyBytes * 8);
        newDifficulty = newDifficulty.and(mask);
        if (newDifficulty.compareTo(receivedDifficulty) != 0)
            throw new VerificationException("Network provided difficulty bits do not match what was calculated: " +
                    receivedDifficulty.toString(16) + " vs " + newDifficulty.toString(16));
    }

    /**
     * For the transactions in the given block, update the walletToTxMap such that each wallet maps to a list of
     * transactions for which it is relevant (sends to one of its keys, or spends from one of its keys).
     */
    private void scanTransactions(Block block, HashMap<Wallet, List<Transaction>> walletToTxMap)
            throws VerificationException {
        for (Transaction tx : block.transactions) {
            try {
                for (Wallet wallet : wallets) {
                    boolean shouldReceive = false;
                    for (TransactionOutput output : tx.outputs) {
                        // TODO: Handle more types of outputs, not just regular to address outputs.
                        if (output.getScriptPubKey().isSentToIP()) continue;
                        // This is not thread safe as a key could be removed between the call to isMine and receive.
                        if (output.isMine(wallet)) {
                            shouldReceive = true;
                            break;
                        }
                    }
                    // Coinbase transactions don't have anything useful in their inputs (as they create coins out of thin air).
                    if (!shouldReceive && !tx.isCoinBase()) {
                        for (TransactionInput i : tx.inputs) {
                            byte[] pubkey = i.getScriptSig().getPubKey();
                            // This is not thread safe as a key could be removed between the call to isPubKeyMine and receive.
                            if (wallet.isPubKeyMine(pubkey)) {
                                shouldReceive = true;
                            }
                        }
                    }
                    if (!shouldReceive) continue;
                    // Lazily create the per-wallet list on first relevant transaction.
                    List<Transaction> txList = walletToTxMap.get(wallet);
                    if (txList == null) {
                        txList = new LinkedList<Transaction>();
                        walletToTxMap.put(wallet, txList);
                    }
                    txList.add(tx);
                }
            } catch (ScriptException e) {
                // We don't want scripts we don't understand to break the block chain so just note that this tx was
                // not scanned here and continue.
                log.warn("Failed to parse a script: " + e.toString());
            }
        }
    }

    /**
     * Returns the block at the head of the current best chain. This is the block which represents the greatest
     * amount of cumulative work done.
     */
    public synchronized StoredBlock getChainHead() {
        return chainHead;
    }

    /**
     * Returns the most recent unconnected block or null if there are none. This will all have to change.
     */
    public synchronized Block getUnconnectedBlock() {
        if (unconnectedBlocks.size() == 0) return null;
        return unconnectedBlocks.get(unconnectedBlocks.size() - 1);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.optimizer.rules; import java.util.HashSet; import java.util.List; import org.apache.asterix.metadata.declared.MetadataProvider; import org.apache.asterix.om.base.AOrderedList; import org.apache.asterix.om.constants.AsterixConstantValue; import org.apache.asterix.om.functions.BuiltinFunctions; import org.apache.asterix.om.types.AOrderedListType; import org.apache.asterix.om.types.IAType; import org.apache.commons.lang3.mutable.Mutable; import org.apache.commons.lang3.mutable.MutableObject; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator; import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext; import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag; import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag; import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable; import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression; import 
org.apache.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
import org.apache.hyracks.api.exceptions.SourceLocation;

/**
 * Rewrites a SELECT whose condition is a disjunction of equality comparisons against a single
 * variable, i.e. {@code $v = c1 OR $v = c2 OR ...}, into an inner join between the SELECT's input
 * and an unnest of the constant list {@code [c1, c2, ...]} on {@code $v = scanVar}. All constants
 * must have mutually compatible types and every disjunct must compare the same variable against
 * exactly one constant; otherwise the rule bails out and leaves the plan unchanged.
 */
public class DisjunctivePredicateToJoinRule implements IAlgebraicRewriteRule {

    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
        // The produced join is a blocking-style operator; respect the global disable switch.
        if (metadataProvider.isBlockingOperatorDisabled()) {
            return false;
        }
        SelectOperator select;
        if ((select = asSelectOperator(opRef)) == null) {
            return false;
        }
        AbstractFunctionCallExpression condEx;
        // Only fire on a top-level OR condition.
        if ((condEx = asFunctionCallExpression(select.getCondition(), AlgebricksBuiltinFunctions.OR)) == null) {
            return false;
        }
        List<Mutable<ILogicalExpression>> args = condEx.getArguments();

        // Validate the shape of every disjunct and collect the compared variable and the set of
        // constants. Any deviation (non-EQ disjunct, mixed variables, incompatible constant
        // types, non var/const operand) aborts the rewrite.
        VariableReferenceExpression varEx = null;
        IAType valType = null;
        HashSet<AsterixConstantValue> values = new HashSet<AsterixConstantValue>();

        for (Mutable<ILogicalExpression> arg : args) {
            AbstractFunctionCallExpression fctCall;
            if ((fctCall = asFunctionCallExpression(arg, AlgebricksBuiltinFunctions.EQ)) == null) {
                return false;
            }
            boolean haveConst = false;
            boolean haveVar = false;
            List<Mutable<ILogicalExpression>> fctArgs = fctCall.getArguments();
            for (Mutable<ILogicalExpression> fctArg : fctArgs) {
                final ILogicalExpression argExpr = fctArg.getValue();
                switch (argExpr.getExpressionTag()) {
                    case CONSTANT:
                        haveConst = true;
                        AsterixConstantValue value = (AsterixConstantValue) ((ConstantExpression) argExpr).getValue();
                        if (valType == null) {
                            valType = value.getObject().getType();
                        } else if (!isCompatible(valType, value.getObject().getType())) {
                            return false;
                        }
                        values.add(value);
                        break;
                    case VARIABLE:
                        haveVar = true;
                        final VariableReferenceExpression varArg = (VariableReferenceExpression) argExpr;
                        if (varEx == null) {
                            varEx = varArg;
                        } else if (!varEx.getVariableReference().equals(varArg.getVariableReference())) {
                            // A different variable appeared; this rule handles one variable only.
                            return false;
                        }
                        break;
                    default:
                        return false;
                }
            }
            // Each disjunct must pair the variable with a constant, e.g. $v = 3.
            if (!(haveVar && haveConst)) {
                return false;
            }
        }

        SourceLocation sourceLoc = select.getSourceLocation();

        // Materialize the (deduplicated) constants as an ordered-list constant that can be
        // unnested. NOTE(review): HashSet iteration order determines list order here; presumably
        // order is irrelevant for an equality join — verify against downstream consumers.
        AOrderedList list = new AOrderedList(new AOrderedListType(valType, "orderedlist"));
        for (AsterixConstantValue value : values) {
            list.add(value.getObject());
        }

        // Build: ETS -> UNNEST(scan-collection([c1,...])) producing scanVar.
        EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
        context.computeAndSetTypeEnvironmentForOperator(ets);

        ILogicalExpression cExp = new ConstantExpression(new AsterixConstantValue(list));
        Mutable<ILogicalExpression> mutCExp = new MutableObject<>(cExp);
        IFunctionInfo scanFctInfo = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.SCAN_COLLECTION);
        UnnestingFunctionCallExpression scanExp = new UnnestingFunctionCallExpression(scanFctInfo, mutCExp);
        scanExp.setSourceLocation(sourceLoc);
        LogicalVariable scanVar = context.newVar();
        UnnestOperator unn = new UnnestOperator(scanVar, new MutableObject<>(scanExp));
        unn.setSourceLocation(sourceLoc);
        unn.getInputs().add(new MutableObject<>(ets));
        context.computeAndSetTypeEnvironmentForOperator(unn);

        // Join condition: scanVar = $v, annotated to prefer an indexed NL join and to broadcast
        // the (small) constants branch.
        IFunctionInfo eqFctInfo = BuiltinFunctions.getAsterixFunctionInfo(AlgebricksBuiltinFunctions.EQ);
        AbstractFunctionCallExpression eqExp = new ScalarFunctionCallExpression(eqFctInfo);
        eqExp.setSourceLocation(sourceLoc);
        VariableReferenceExpression scanVarRef = new VariableReferenceExpression(scanVar);
        scanVarRef.setSourceLocation(sourceLoc);
        eqExp.getArguments().add(new MutableObject<>(scanVarRef));
        eqExp.getArguments().add(new MutableObject<>(varEx.cloneExpression()));
        eqExp.getAnnotations().put(IndexedNLJoinExpressionAnnotation.INSTANCE,
                IndexedNLJoinExpressionAnnotation.INSTANCE);
        BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
        bcast.setObject(BroadcastExpressionAnnotation.BroadcastSide.LEFT); // Broadcast the OR predicates branch.
        eqExp.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);

        // Replace the SELECT with: join(constants-branch, select's former input).
        InnerJoinOperator jOp = new InnerJoinOperator(new MutableObject<>(eqExp));
        jOp.setSourceLocation(sourceLoc);
        jOp.getInputs().add(new MutableObject<>(unn));
        jOp.getInputs().add(select.getInputs().get(0));

        opRef.setValue(jOp);
        context.computeAndSetTypeEnvironmentForOperator(jOp);
        return true;
    }

    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
        // All work happens in rewritePost.
        return false;
    }

    /**
     * This checks the compatibility of the types of the constants to ensure that the comparison behaves as expected
     * when joining. Right now this compatibility is defined as type equality, but it could be relaxed.
     * Once type promotion works correctly in all parts of the system, this check should not be needed anymore.
     * (see https://code.google.com/p/asterixdb/issues/detail?id=716)
     *
     * @param t1
     *            one type
     * @param t2
     *            another type
     * @return true, if types are equal
     */
    private static boolean isCompatible(IAType t1, IAType t2) {
        return t1.equals(t2);
    }

    // some helpers

    /** Returns op as a SelectOperator, or null if it is some other operator kind. */
    private static SelectOperator asSelectOperator(ILogicalOperator op) {
        return op.getOperatorTag() == LogicalOperatorTag.SELECT ? (SelectOperator) op : null;
    }

    private static SelectOperator asSelectOperator(Mutable<ILogicalOperator> op) {
        return asSelectOperator(op.getValue());
    }

    /**
     * Returns ex as a function call expression, or null if it is not a function call or (when fi
     * is non-null) calls a different function than fi.
     */
    private static AbstractFunctionCallExpression asFunctionCallExpression(ILogicalExpression ex,
            FunctionIdentifier fi) {
        AbstractFunctionCallExpression fctCall = (ex.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
                ? (AbstractFunctionCallExpression) ex : null);
        if (fctCall != null && (fi == null || fctCall.getFunctionIdentifier().equals(fi)))
            return fctCall;
        return null;
    }

    private static AbstractFunctionCallExpression asFunctionCallExpression(Mutable<ILogicalExpression> ex,
            FunctionIdentifier fi) {
        return asFunctionCallExpression(ex.getValue(), fi);
    }
}
/** * Copyright 2010 Mark Wyszomierski * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.android; import com.joelapenna.foursquared.R; import android.app.Activity; import android.app.ProgressDialog; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Typeface; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.Window; import android.view.ViewGroup.LayoutParams; import android.webkit.CookieManager; import android.webkit.CookieSyncManager; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.LinearLayout; import android.widget.TextView; /** * This activity can be used to run a facebook url request through a webview. * The user must supply these intent extras: * <ul> * <li>INTENT_EXTRA_ACTION - string, which facebook action to perform, like * "login", or "stream.publish".</li> * <li>INTENT_EXTRA_KEY_APP_ID - string, facebook developer key.</li> * <li>INTENT_EXTRA_KEY_PERMISSIONS - string array, set of facebook permissions * you want to use.</li> * </ul> * or you can supply only INTENT_EXTRA_KEY_CLEAR_COOKIES to just have the * activity clear its stored cookies (you can also supply it in combination with * the above flags to clear cookies before trying to run a request too). 
If * you've already authenticated the user, you can optionally pass in the token * and expiration time as intent extras using: * <ul> * <li>INTENT_EXTRA_AUTHENTICATED_TOKEN</li> * <li>INTENT_EXTRA_AUTHENTICATED_EXPIRES</li> * </ul> * they will then be used in web requests. You should use * <code>startActivityForResult</code> to start the activity. When the activity * finishes, it will return status code RESULT_OK. You can then check the * returned intent data object for: * <ul> * <li>INTENT_RESULT_KEY_RESULT_STATUS - boolean, whether the request succeeded * or not.</li> * <li>INTENT_RESULT_KEY_SUPPLIED_ACTION - string, the action you supplied as an * intent extra echoed back as a convenience.</li> * <li>INTENT_RESULT_KEY_RESULT_BUNDLE - bundle, present if request succeeded, * will have all the returned parameters as supplied by the WebView operation.</li> * <li>INTENT_RESULT_KEY_ERROR - string, present if request failed.</li> * </ul> * If the user canceled this activity, the activity result code will be * RESULT_CANCELED and there will be no intent data returned. You need the * <code>android.permission.INTERNET</code> permission added to your manifest. * You need to add this activity definition to your manifest. 
You can prevent * this activity from restarting on rotation so the network operations are * preserved like so: <activity * android:name="com.facebook.android.FacebookWebViewActivity" * android:configChanges="orientation|keyboardHidden" /> * * @date June 14, 2010 * @author Mark Wyszomierski (markww@gmail.com) */ public class FacebookWebViewActivity extends Activity { private static final String TAG = "FacebookWebViewActivity"; public static final String INTENT_EXTRA_ACTION = "com.facebook.android.FacebookWebViewActivity.action"; public static final String INTENT_EXTRA_KEY_APP_ID = "com.facebook.android.FacebookWebViewActivity.appid"; public static final String INTENT_EXTRA_KEY_PERMISSIONS = "com.facebook.android.FacebookWebViewActivity.permissions"; public static final String INTENT_EXTRA_AUTHENTICATED_TOKEN = "com.facebook.android.FacebookWebViewActivity.authenticated_token"; public static final String INTENT_EXTRA_AUTHENTICATED_EXPIRES = "com.facebook.android.FacebookWebViewActivity.authenticated_expires"; public static final String INTENT_EXTRA_KEY_CLEAR_COOKIES = "com.facebook.android.FacebookWebViewActivity.clear_cookies"; public static final String INTENT_EXTRA_KEY_DEBUG = "com.facebook.android.FacebookWebViewActivity.debug"; public static final String INTENT_RESULT_KEY_RESULT_STATUS = "result_status"; public static final String INTENT_RESULT_KEY_SUPPLIED_ACTION = "supplied_action"; public static final String INTENT_RESULT_KEY_RESULT_BUNDLE = "bundle"; public static final String INTENT_RESULT_KEY_ERROR = "error"; private static final String DISPLAY_STRING = "display=touch"; private static final int FB_BLUE = 0xFF6D84B4; private static final int MARGIN = 4; private static final int PADDING = 2; private TextView mTitle; private WebView mWebView; private ProgressDialog mSpinner; private String mAction; private String mAppId; private String[] mPermissions; private boolean mDebug; @Override protected void onCreate(Bundle savedInstanceState) { 
super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); CookieSyncManager.createInstance(this); LinearLayout ll = new LinearLayout(this); ll.setOrientation(LinearLayout.VERTICAL); ll.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); mTitle = new TextView(this); mTitle.setText("Facebook"); mTitle.setTextColor(Color.WHITE); mTitle.setTypeface(Typeface.DEFAULT_BOLD); mTitle.setBackgroundColor(FB_BLUE); mTitle.setPadding(MARGIN + PADDING, MARGIN, MARGIN, MARGIN); mTitle.setCompoundDrawablePadding(MARGIN + PADDING); mTitle.setCompoundDrawablesWithIntrinsicBounds(this.getResources().getDrawable( R.drawable.facebook_icon), null, null, null); ll.addView(mTitle); mWebView = new WebView(this); mWebView.setLayoutParams(new LayoutParams( LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); mWebView.setWebViewClient(new WebViewClientFacebook()); mWebView.setVerticalScrollBarEnabled(false); mWebView.setHorizontalScrollBarEnabled(false); mWebView.getSettings().setJavaScriptEnabled(true); ll.addView(mWebView); mSpinner = new ProgressDialog(this); mSpinner.requestWindowFeature(Window.FEATURE_NO_TITLE); mSpinner.setMessage("Loading..."); setContentView(ll, new LinearLayout.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); Bundle extras = getIntent().getExtras(); if (extras != null) { if (extras.containsKey(INTENT_EXTRA_KEY_DEBUG)) { mDebug = extras.getBoolean(INTENT_EXTRA_KEY_DEBUG); } if (extras.containsKey(INTENT_EXTRA_ACTION)) { if (extras.getBoolean(INTENT_EXTRA_KEY_CLEAR_COOKIES, false)) { clearCookies(); } if (extras.containsKey(INTENT_EXTRA_KEY_APP_ID)) { if (extras.containsKey(INTENT_EXTRA_KEY_PERMISSIONS)) { mAction = extras.getString(INTENT_EXTRA_ACTION); mAppId = extras.getString(INTENT_EXTRA_KEY_APP_ID); mPermissions = extras.getStringArray(INTENT_EXTRA_KEY_PERMISSIONS); // If the user supplied a pre-authenticated info, use it // here. 
Facebook facebook = new Facebook(); if (extras.containsKey(INTENT_EXTRA_AUTHENTICATED_TOKEN) && extras.containsKey(INTENT_EXTRA_AUTHENTICATED_EXPIRES)) { facebook.setAccessToken(extras .getString(INTENT_EXTRA_AUTHENTICATED_TOKEN)); facebook.setAccessExpires(extras .getLong(INTENT_EXTRA_AUTHENTICATED_EXPIRES)); if (mDebug) { Log.d(TAG, "onCreate(): authenticated token being used."); } } // Generate the url based on the action. String url = facebook.generateUrl(mAction, mAppId, mPermissions); if (mDebug) { String permissionsDump = "(null)"; if (mPermissions != null) { if (mPermissions.length > 0) { for (int i = 0; i < mPermissions.length; i++) { permissionsDump += mPermissions[i] + ", "; } } else { permissionsDump = "[empty]"; } } Log.d(TAG, "onCreate(): action: " + mAction + ", appid: " + mAppId + ", permissions: " + permissionsDump); Log.d(TAG, "onCreate(): Loading url: " + url); } // Start the request finally. mWebView.loadUrl(url); } else { Log.e(TAG, "Missing intent extra: INTENT_EXTRA_KEY_PERMISSIONS, finishing immediately."); finish(); } } else { Log.e(TAG, "Missing intent extra: INTENT_EXTRA_KEY_APP_ID, finishing immediately."); finish(); } } else if (extras.getBoolean(INTENT_EXTRA_KEY_CLEAR_COOKIES)) { clearCookies(); } else { Log.e(TAG, "Missing intent extra: INTENT_EXTRA_ACTION or INTENT_EXTRA_KEY_CLEAR_COOKIES, finishing immediately."); finish(); } } else { Log.e(TAG, "No intent extras supplied, finishing immediately."); finish(); } } private void clearCookies() { CookieManager cookieManager = CookieManager.getInstance(); cookieManager.removeAllCookie(); } @Override protected void onResume() { super.onResume(); CookieSyncManager.getInstance().startSync(); } @Override protected void onPause() { super.onPause(); CookieSyncManager.getInstance().stopSync(); } private class WebViewClientFacebook extends WebViewClient { @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { if (mDebug) { Log.d(TAG, 
"WebViewClientFacebook:shouldOverrideUrlLoading(): " + url); } if (url.startsWith(Facebook.REDIRECT_URI)) { Bundle values = FacebookUtil.parseUrl(url); String error = values.getString("error_reason"); Log.e("TTTT", error); Intent result = new Intent(); result.putExtra(INTENT_RESULT_KEY_SUPPLIED_ACTION, mAction); if (error == null) { CookieSyncManager.getInstance().sync(); result.putExtra(INTENT_RESULT_KEY_RESULT_STATUS, true); result.putExtra(INTENT_RESULT_KEY_RESULT_BUNDLE, values); FacebookWebViewActivity.this.setResult(Activity.RESULT_OK, result); } else { result.putExtra(INTENT_RESULT_KEY_RESULT_STATUS, false); result.putExtra(INTENT_RESULT_KEY_SUPPLIED_ACTION, mAction); result.putExtra(INTENT_RESULT_KEY_ERROR, error); FacebookWebViewActivity.this.setResult(Activity.RESULT_OK, result); } FacebookWebViewActivity.this.finish(); return true; } else if (url.startsWith(Facebook.CANCEL_URI)) { FacebookWebViewActivity.this.setResult(Activity.RESULT_CANCELED); FacebookWebViewActivity.this.finish(); return true; } else if (url.contains(DISPLAY_STRING)) { return false; } // Launch non-dialog URLs in a full browser. 
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(url))); return true; } @Override public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { super.onReceivedError(view, errorCode, description, failingUrl); if (mDebug) { Log.d(TAG, "WebViewClientFacebook:onReceivedError(): " + errorCode + ", " + description + ", " + failingUrl); } Intent result = new Intent(); result.putExtra(INTENT_RESULT_KEY_RESULT_STATUS, false); result.putExtra(INTENT_RESULT_KEY_SUPPLIED_ACTION, mAction); result.putExtra(INTENT_RESULT_KEY_ERROR, description + ", " + errorCode + ", " + failingUrl); FacebookWebViewActivity.this.setResult(Activity.RESULT_OK, result); FacebookWebViewActivity.this.finish(); } @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { super.onPageStarted(view, url, favicon); if (mDebug) { Log.d(TAG, "WebViewClientFacebook:onPageStarted(): " + url); } mSpinner.show(); } @Override public void onPageFinished(WebView view, String url) { super.onPageFinished(view, url); if (mDebug) { Log.d(TAG, "WebViewClientFacebook:onPageFinished(): " + url); } String title = mWebView.getTitle(); if (title != null && title.length() > 0) { mTitle.setText(title); } mSpinner.dismiss(); } } }
// =================================================================================================
// Copyright 2011 Twitter, Inc.
// -------------------------------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this work except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file, or at:
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =================================================================================================

package com.twitter.common.net.http.handlers;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.google.common.io.ByteStreams;
import com.google.common.io.InputSupplier;

import org.junit.Before;
import org.junit.Test;

import com.twitter.common.net.http.handlers.AssetHandler.StaticAsset;
import com.twitter.common.testing.EasyMockTest;

import static com.twitter.common.net.http.handlers.AssetHandler.CACHE_CONTROL_MAX_AGE_SECS;
import static javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static org.easymock.EasyMock.expect;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

/**
 * Exercises AssetHandler's ETag / Cache-Control / gzip behavior in the four combinations of
 * {cached, uncached} x {identity, gzip}. Each scenario issues three GETs: an unconditional one,
 * a conditional GET with a wrong checksum, and a conditional GET with the correct checksum.
 *
 * @author William Farner
 */
public class AssetHandlerTest extends EasyMockTest {

    private static final String TEST_DATA = "here is my great test data";

    // Checksum of the gzipped TEST_DATA.
    private static final String TEST_DATA_CHECKSUM = "ePvVhtAeVRu85KSOLKL0oQ==";

    private static final String CONTENT_TYPE = "text/plain";

    // Mocked source of the asset bytes; expectations control how many times it may be read.
    private InputSupplier<InputStream> inputSupplier;

    @Before
    public void setUp() {
        inputSupplier = createMock(new Clazz<InputSupplier<InputStream>>() {});
    }

    /** Bundles the mocked request/response pair with the stream capturing the response body. */
    private static class Request {
        private final HttpServletRequest req;
        private final HttpServletResponse resp;
        private final ByteArrayOutputStream responseBody;

        Request(HttpServletRequest req, HttpServletResponse resp, ByteArrayOutputStream responseBody) {
            this.req = req;
            this.resp = resp;
            this.responseBody = responseBody;
        }
    }

    /**
     * Records the mock expectations for a single GET.
     *
     * @param suppliedChecksum value of the client's If-None-Match header, or null for none.
     * @param supportedEncodings value of the client's Accept-Encoding header, or null for none.
     * @param expectedResponseCode status code the handler is expected to set.
     * @param expectRead whether the handler should read from the asset's input supplier.
     */
    private Request doGet(String suppliedChecksum, String supportedEncodings, int expectedResponseCode,
        boolean expectRead) throws Exception {
      HttpServletRequest req = createMock(HttpServletRequest.class);
      HttpServletResponse resp = createMock(HttpServletResponse.class);

      if (expectRead) {
        expect(inputSupplier.getInput()).andReturn(new ByteArrayInputStream(TEST_DATA.getBytes()));
      }

      expect(req.getHeader("If-None-Match")).andReturn(suppliedChecksum);
      resp.setStatus(expectedResponseCode);
      if (expectedResponseCode == SC_OK) {
        // Response headers are only emitted on a 200; a 304 carries no payload metadata.
        expect(req.getHeader("Accept-Encoding")).andReturn(supportedEncodings);
        resp.setHeader("Cache-Control", "public,max-age=" + CACHE_CONTROL_MAX_AGE_SECS);
        resp.setHeader("ETag", TEST_DATA_CHECKSUM);
        resp.setContentType(CONTENT_TYPE);
        if (supportedEncodings != null && supportedEncodings.contains("gzip")) {
          resp.setHeader("Content-Encoding", "gzip");
        }
      }

      return new Request(req, resp, expectPayload(resp));
    }

    @Test
    public void testCached() throws Exception {
      // First request - no cached value
      Request test1 = doGet(
          null,  // No local checksum.
          null,  // No encodings supported.
          SC_OK,
          true   // Triggers a data read.
      );

      // Second request - client performs conditional GET with wrong checksum.
      Request test2 = doGet(
          "foo", // Wrong checksum.
          null,  // No encodings supported.
          SC_OK,
          false  // No read (value cached from the first request).
      );

      // Third request - client performs conditional GET with correct checksum.
      Request test3 = doGet(
          TEST_DATA_CHECKSUM, // Correct checksum.
          null,  // No encodings supported.
          SC_NOT_MODIFIED,
          false  // No read.
      );

      control.replay();

      AssetHandler handler = new AssetHandler(new StaticAsset(inputSupplier, CONTENT_TYPE, true));
      handler.doGet(test1.req, test1.resp);
      assertThat(new String(test1.responseBody.toByteArray()), is(TEST_DATA));
      handler.doGet(test2.req, test2.resp);
      assertThat(new String(test2.responseBody.toByteArray()), is(TEST_DATA));
      handler.doGet(test3.req, test3.resp);
      // A 304 carries an empty body.
      assertThat(new String(test3.responseBody.toByteArray()), is(""));
    }

    @Test
    public void testCachedGzipped() throws Exception {
      // First request - no cached value
      Request test1 = doGet(
          null,   // No local checksum.
          "gzip", // Supported encodings.
          SC_OK,
          true    // Triggers a data read.
      );

      // Second request - client performs conditional GET with wrong checksum.
      Request test2 = doGet(
          "foo",               // Wrong checksum.
          "gzip,fakeencoding", // Supported encodings.
          SC_OK,
          false                // No read.
      );

      // Third request - client performs conditional GET with correct checksum.
      Request test3 = doGet(
          TEST_DATA_CHECKSUM, // Correct checksum.
          "gzip,deflate",     // Supported encodings.
          SC_NOT_MODIFIED,
          false               // No read.
      );

      control.replay();

      AssetHandler handler = new AssetHandler(new StaticAsset(inputSupplier, CONTENT_TYPE, true));
      handler.doGet(test1.req, test1.resp);
      assertThat(unzip(test1.responseBody), is(TEST_DATA));
      handler.doGet(test2.req, test2.resp);
      assertThat(unzip(test2.responseBody), is(TEST_DATA));
      handler.doGet(test3.req, test3.resp);
      assertThat(new String(test3.responseBody.toByteArray()), is(""));
    }

    @Test
    public void testUncached() throws Exception {
      // First request - no cached value
      Request test1 = doGet(
          null, // No local checksum.
          null, // No encodings supported.
          SC_OK,
          true  // Triggers a data read.
      );

      // Second request - client performs conditional GET with wrong checksum.
      Request test2 = doGet(
          "foo", // Wrong checksum.
          null,  // No encodings supported.
          SC_OK,
          true   // Triggers a data read (caching disabled, so every request reads).
      );

      // Third request - client performs conditional GET with correct checksum.
      Request test3 = doGet(
          TEST_DATA_CHECKSUM, // Correct checksum.
          null,  // No encodings supported.
          SC_NOT_MODIFIED,
          true   // Triggers a data read.
      );

      control.replay();

      AssetHandler handler = new AssetHandler(new StaticAsset(inputSupplier, CONTENT_TYPE, false));
      handler.doGet(test1.req, test1.resp);
      assertThat(new String(test1.responseBody.toByteArray()), is(TEST_DATA));
      handler.doGet(test2.req, test2.resp);
      assertThat(new String(test2.responseBody.toByteArray()), is(TEST_DATA));
      handler.doGet(test3.req, test3.resp);
      assertThat(new String(test3.responseBody.toByteArray()), is(""));
    }

    @Test
    public void testUncachedGzipped() throws Exception {
      // First request - no cached value
      Request test1 = doGet(
          null,   // No local checksum.
          "gzip", // Supported encodings.
          SC_OK,
          true    // Triggers a data read.
      );

      // Second request - client performs conditional GET with wrong checksum.
      Request test2 = doGet(
          "foo",               // Wrong checksum.
          "gzip,fakeencoding", // Supported encodings.
          SC_OK,
          true                 // Triggers a data read.
      );

      // Third request - client performs conditional GET with correct checksum.
      Request test3 = doGet(
          TEST_DATA_CHECKSUM, // Correct checksum.
          "gzip,deflate",     // Supported encodings.
          SC_NOT_MODIFIED,
          true                // Triggers a data read.
      );

      control.replay();

      AssetHandler handler = new AssetHandler(new StaticAsset(inputSupplier, CONTENT_TYPE, false));
      handler.doGet(test1.req, test1.resp);
      assertThat(unzip(test1.responseBody), is(TEST_DATA));
      handler.doGet(test2.req, test2.resp);
      assertThat(unzip(test2.responseBody), is(TEST_DATA));
      handler.doGet(test3.req, test3.resp);
      assertThat(new String(test3.responseBody.toByteArray()), is(""));
    }

    /** Wires the mocked response's output stream to an in-memory buffer we can inspect. */
    private static ByteArrayOutputStream expectPayload(HttpServletResponse resp) throws Exception {
      ByteArrayOutputStream responseBody = new ByteArrayOutputStream();
      expect(resp.getOutputStream()).andReturn(new FakeServletOutputStream(responseBody));
      return responseBody;
    }

    /** Gunzips the captured response body back into a String for comparison. */
    private static String unzip(ByteArrayOutputStream streamData) throws IOException {
      ByteArrayInputStream in = new ByteArrayInputStream(streamData.toByteArray());
      GZIPInputStream unzip = new GZIPInputStream(in);
      return new String(ByteStreams.toByteArray(unzip));
    }

    /**
     * Minimal ServletOutputStream that forwards the byte-oriented write methods to a real
     * stream; the print/println convenience methods are unused by the handler and throw.
     */
    private static class FakeServletOutputStream extends ServletOutputStream {
      private final OutputStream realStream;

      FakeServletOutputStream(OutputStream realStream) {
        this.realStream = realStream;
      }

      @Override public void write(int b) throws IOException {
        realStream.write(b);
      }

      @Override public void write(byte[] b) throws IOException {
        realStream.write(b);
      }

      @Override public void write(byte[] b, int off, int len) throws IOException {
        realStream.write(b, off, len);
      }

      @Override public void flush() throws IOException {
        realStream.flush();
      }

      @Override public void close() throws IOException {
        realStream.close();
      }

      @Override public void print(String s) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(boolean b) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(char c) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(int i) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(long l) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(float f) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void print(double d) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println() throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(String s) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(boolean b) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(char c) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(int i) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(long l) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(float f) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }

      @Override public void println(double d) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
      }
    }
}
// PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2011 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.pathvisio.gui.dialogs; import java.awt.Component; import java.awt.Frame; import java.util.HashMap; import java.util.Map; import javax.swing.JOptionPane; import javax.swing.JTabbedPane; import org.pathvisio.core.model.Pathway; import org.pathvisio.core.model.PathwayElement; import org.pathvisio.core.model.StaticProperty; import org.pathvisio.core.view.UndoAction; import org.pathvisio.core.view.VPathway; import org.pathvisio.gui.SwingEngine; import org.pathvisio.gui.panels.CommentPanel; import org.pathvisio.gui.panels.LitReferencePanel; import org.pathvisio.gui.panels.PathwayElementPanel; /** * Dialog that allows you to display and edit properties of a PathwayElement * @author thomas * */ public class PathwayElementDialog extends OkCancelDialog { public static final String TAB_COMMENTS = "Comments"; public static final String TAB_LITERATURE = "Literature"; PathwayElement input; private JTabbedPane dialogPane; private Map<String, PathwayElementPanel> panels; private Map<StaticProperty, Object> state = new HashMap<StaticProperty, Object>(); private Pathway originalPathway; //Used for undo event protected boolean readonly; protected SwingEngine swingEngine; protected PathwayElementDialog(SwingEngine swingEngine, PathwayElement e, boolean readonly, Frame frame, String title, Component 
locationComp) { super(frame, title, locationComp, true); this.readonly = readonly; this.swingEngine = swingEngine; setDialogComponent(createDialogPane()); panels = new HashMap<String, PathwayElementPanel>(); createTabs(); setInput(e); setSize(450, 300); } protected Component createDialogPane() { dialogPane = new JTabbedPane(); return dialogPane; } /** * Get the pathway element for this dialog */ protected PathwayElement getInput() { return input; } /** * Set the pathway element for this dialog */ public void setInput(PathwayElement e) { input = e; storeState(); refresh(); } /** * Refresh the GUI components to reflect the current pathway element's properties. This * method automatically refreshes all registered PathwayElementPanels. * Subclasses may override this to update their own GUI components that are not added * as PathwayElementPanel. */ protected void refresh() { for(PathwayElementPanel p : panels.values()) { p.setInput(input); } } /** * Store the current state of the pathway element. This is used to cancel * the modifications made in the dialog. */ protected void storeState() { PathwayElement e = getInput(); originalPathway = (Pathway) e.getParent().clone(); for(StaticProperty t : e.getStaticPropertyKeys()) { state.put(t, e.getStaticProperty(t)); } } /** * Restore the original state of the pathway element. This is called when the * cancel button is pressed. 
*/ protected void restoreState() { PathwayElement e = getInput(); for(StaticProperty t : state.keySet()) { e.setStaticProperty(t, state.get(t)); } } private void createTabs() { addPathwayElementPanel(TAB_COMMENTS, new CommentPanel()); addPathwayElementPanel(TAB_LITERATURE, new LitReferencePanel(swingEngine)); addCustomTabs(dialogPane); } /** * * @param tabLabel * @param p */ public void addPathwayElementPanel(String tabLabel, PathwayElementPanel p) { p.setReadOnly(readonly); dialogPane.add(tabLabel, p); panels.put(tabLabel, p); } public void removePathwayElementPanel(String tabLabel) { PathwayElementPanel panel = panels.get(tabLabel); if(panel != null) { dialogPane.remove(panel); panels.remove(panel); } } public void selectPathwayElementPanel(String tabLabel) { PathwayElementPanel panel = panels.get(tabLabel); if(panel != null) { dialogPane.setSelectedComponent(panel); } } /** * Override in subclass and use * {@link #addPathwayElementPanel(String, PathwayElementPanel)} to add a PathwayElementPanel, or * use {@link JTabbedPane#add(Component)}. * @param parent */ protected void addCustomTabs(JTabbedPane parent) { //To be implemented by subclasses } /** * Called when the OK button is pressed. Will close the dialog amd register an undo event. 
*/ protected void okPressed() { boolean done = true; if(this instanceof DataNodeDialog || this instanceof LineDialog) { if(!input.getElementID().equals("") && input.getDataSource() == null) { done = false; JOptionPane.showMessageDialog(this, "You annotated this pathway element with an identifier but no database.\n Please specify a database system.", "Error", JOptionPane.ERROR_MESSAGE); } else if (input.getElementID().equals("") && input.getDataSource() != null) { done = false; JOptionPane.showMessageDialog(this, "You annotated this pathway element with a database but no identifier.\n Please specify an identifier.", "Error", JOptionPane.ERROR_MESSAGE); } } if(done) { VPathway p = swingEngine.getEngine().getActiveVPathway(); p.getUndoManager().newAction( new UndoAction("Modified element properties", originalPathway) ); if(p != null) p.redraw(); setVisible(false); } } /** * Called when the Cancel button is pressed. Will close the dialog and revert the * pathway element to it's original state. */ protected void cancelPressed() { restoreState(); setVisible(false); } }
/* * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.client.android.fragment; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.hawkular.client.android.R; import org.hawkular.client.android.backend.model.Operation; import org.hawkular.client.android.backend.model.OperationParameter; import org.hawkular.client.android.backend.model.Resource; import org.hawkular.client.android.util.Fragments; import org.hawkular.client.android.util.OperationManager; import org.jboss.aerogear.android.core.Callback; import org.json.JSONException; import org.json.JSONObject; import android.app.Dialog; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatDialogFragment; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.text.InputType; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.ScrollView; import android.widget.TableLayout; import android.widget.TableRow; import android.widget.TextView; import timber.log.Timber; public class ConfirmOperationFragment extends AppCompatDialogFragment { private TableLayout 
table; private Button execute, cancel; private TextView operationDetail; private Toolbar toolbar; private SwitchCompat custom; private ScrollView scroll; private Callback<String> callback; private Resource resource; private Operation operation; public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_confirm_operation, container, false); } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { return super.onCreateDialog(savedInstanceState); } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); toolbar = (Toolbar) view.findViewById(R.id.toolbar); table = (TableLayout) view.findViewById(R.id.operation_param_table); execute = (Button) view.findViewById(R.id.execute); cancel = (Button) view.findViewById(R.id.cancel); operationDetail = (TextView) view.findViewById(R.id.operation_detail); custom = (SwitchCompat) view.findViewById(R.id.custom); scroll = (ScrollView) view.findViewById(R.id.scroll); resource = getResource(); operation = getOperation(); scroll.setVisibility(View.GONE); toolbar.setTitle(R.string.operation_confirm_title); setOperationDetail(); createTable(); if (operation.getOperationProperties() == null || operation.getOperationProperties().getOperationParameters() == null || operation.getOperationProperties().getOperationParameters().size() == 0 ) { custom.setEnabled(false); } execute.setOnClickListener(new ClickListner()); cancel.setOnClickListener(new ClickListner()); custom.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { if (checked) { scroll.setVisibility(View.VISIBLE); } else { scroll.setVisibility(View.GONE); } } }); } public void setCallback(Callback<String> callback) { this.callback = callback; } private void setOperationDetail() { 
operationDetail.setText(operation.getName() + " on " + resource.getName()); } private Resource getResource() { return getArguments().getParcelable(Fragments.Arguments.RESOURCE); } private Operation getOperation() { return getArguments().getParcelable(Fragments.Arguments.OPERATION); } private void createTable() { TableRow row = null; List<OperationParameter> operationParameters = operation.getOperationProperties().getOperationParameters(); if (operationParameters != null) { for (OperationParameter operationParameter : operationParameters) { switch (operationParameter.getType()) { case "string": { row = (TableRow) LayoutInflater.from(getActivity()).inflate(R.layout.row_edit, null); EditText data = (EditText) row.findViewById(R.id.data); data.setText(operationParameter.getDefaultValue()); data.setInputType(InputType.TYPE_CLASS_TEXT); break; } case "int": { row = (TableRow) LayoutInflater.from(getActivity()).inflate(R.layout.row_edit, null); EditText data = (EditText) row.findViewById(R.id.data); data.setText(operationParameter.getDefaultValue()); data.setInputType(InputType.TYPE_CLASS_NUMBER); break; } case "float": { row = (TableRow) LayoutInflater.from(getActivity()).inflate(R.layout.row_edit, null); EditText data = (EditText) row.findViewById(R.id.data); data.setText(operationParameter.getDefaultValue()); data.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL); break; } case "bool": { row = (TableRow) LayoutInflater.from(getActivity()).inflate(R.layout.row_toggle, null); SwitchCompat data = (SwitchCompat) row.findViewById(R.id.data); data.setChecked(operationParameter.getDefaultValue().equals("true")); break; } } if (row != null) { TextView name = (TextView) row.findViewById(R.id.name); name.setText(operationParameter.getName()); table.addView(row); } } table.requestLayout(); } } private void sendRequest(Map<String, String> map) { JSONObject body = new JSONObject(); try { body.put("operationName", operation.getId()); 
body.put("resourcePath", resource.getPath()); if (custom.isChecked()) { Set<Map.Entry<String, String>> set = map.entrySet(); JSONObject params = new JSONObject(); body.put("parameters", params); for (Map.Entry<String, String> entry : set) { params.put(entry.getKey(), entry.getValue()); } } } catch (JSONException e) { Timber.e(e.getMessage()); } OperationManager operationManager = OperationManager.getInstance(getActivity(), callback); operationManager.sendRequest(body.toString()); dismiss(); } @Override public void onResume() { ViewGroup.LayoutParams params = getDialog().getWindow().getAttributes(); params.width = LinearLayout.LayoutParams.MATCH_PARENT; getDialog().getWindow().setAttributes((android.view.WindowManager.LayoutParams) params); super.onResume(); } private class ClickListner implements View.OnClickListener { @Override public void onClick(View view) { if (view.getId() == R.id.cancel) { dismiss(); } else if (view.getId() == R.id.execute) { boolean valid = true; Map<String, String> map = new HashMap<String, String>(); for (int i = 0; i < table.getChildCount(); i++) { View v = table.getChildAt(i); String name = ((TextView) v.findViewById(R.id.name)).getText().toString(); View data = v.findViewById(R.id.data); if (data instanceof SwitchCompat) { map.put(name, ((SwitchCompat) data).isChecked() ? "true" : "false"); } else if (data instanceof EditText) { String value = ((EditText) data).getText().toString(); if (value.trim().equals("")) { valid = false; ((EditText) data).setError(getString(R.string.error_empty)); } else { map.put(name, value); } } } if (valid) { sendRequest(map); } } } } }
package in.testpress.testpress.authenticator; import android.accounts.Account; import android.accounts.AccountManager; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.os.Bundle; import android.os.CountDownTimer; import android.provider.Settings; import androidx.appcompat.app.AppCompatActivity; import android.text.Editable; import android.text.TextWatcher; import android.view.KeyEvent; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.TextView; import com.afollestad.materialdialogs.GravityEnum; import com.afollestad.materialdialogs.MaterialDialog; import com.google.android.gms.auth.api.phone.SmsRetriever; import java.io.IOException; import java.util.List; import javax.inject.Inject; import butterknife.ButterKnife; import butterknife.InjectView; import butterknife.OnClick; import in.testpress.core.TestpressCallback; import in.testpress.core.TestpressException; import in.testpress.core.TestpressSdk; import in.testpress.core.TestpressSession; import in.testpress.testpress.Injector; import in.testpress.testpress.R; import in.testpress.testpress.TestpressApplication; import in.testpress.testpress.core.Constants; import in.testpress.testpress.core.TestpressService; import in.testpress.testpress.events.SmsReceivingEvent; import in.testpress.testpress.models.DaoSession; import in.testpress.testpress.models.Device; import in.testpress.testpress.models.InstituteSettings; import in.testpress.testpress.models.InstituteSettingsDao; import in.testpress.testpress.models.PostDao; import in.testpress.testpress.models.RegistrationSuccessResponse; import in.testpress.testpress.models.RegistrationErrorDetails; import in.testpress.testpress.ui.MainActivity; import in.testpress.testpress.ui.PostActivity; import 
in.testpress.testpress.ui.TextWatcherAdapter; import in.testpress.testpress.util.GCMPreference; import in.testpress.testpress.util.InternetConnectivityChecker; import in.testpress.testpress.util.SafeAsyncTask; import retrofit.RetrofitError; import static android.view.inputmethod.EditorInfo.IME_ACTION_DONE; import static in.testpress.testpress.BuildConfig.APPLICATION_ID; import static in.testpress.testpress.BuildConfig.BASE_URL; public class CodeVerificationActivity extends AppCompatActivity { @Inject TestpressService testpressService; @InjectView(R.id.welcome) TextView welcomeText; @InjectView(R.id.verification_code_error) TextView verificationCodeError; @InjectView(R.id.et_username) EditText usernameText; @InjectView(R.id.et_verificationCode) EditText verificationCodeText; @InjectView(R.id.b_verify) Button verifyButton; @InjectView(R.id.progressbar) ProgressBar progressBar; @InjectView(R.id.count) TextView countText; @InjectView(R.id.sms_receiving_layout) LinearLayout smsReceivingLayout; private String username; private String password; private String authToken; private AccountManager accountManager; private RegistrationSuccessResponse codeResponse; private final TextWatcher watcher = validationTextWatcher(); private MaterialDialog progressDialog; private Context context=this; private SmsReceivingEvent smsReceivingEvent; private Timer timer; private InternetConnectivityChecker internetConnectivityChecker = new InternetConnectivityChecker(this); private PackageManager packageManager; private InstituteSettingsDao instituteSettingsDao; private InstituteSettings instituteSettings; @Override public void onCreate(Bundle bundle) { super.onCreate(bundle); Injector.inject(this); setContentView(R.layout.code_verify_activity); ButterKnife.inject(this); final Intent intent = getIntent(); fetchInstituteSettingLocalDB(); username = intent.getStringExtra("username"); password = intent.getStringExtra("password"); String phoneNumber = intent.getStringExtra("phoneNumber"); 
if(username == null){ usernameText.setVisibility(View.VISIBLE); verificationCodeText.setVisibility(View.VISIBLE); verifyButton.setVisibility(View.VISIBLE); smsReceivingLayout.setVisibility(View.GONE); usernameText.addTextChangedListener(watcher); } else { welcomeText.setText("Waiting to automatically detect an sms sent to " + phoneNumber + "\nIf you get the verification code press Manually Verify"); timer = new Timer(); smsReceivingEvent = new SmsReceivingEvent(timer); IntentFilter filter = new IntentFilter(); filter.addAction(SmsRetriever.SMS_RETRIEVED_ACTION); registerReceiver(smsReceivingEvent, filter); //Register SMS broadcast receiver timer.start(); // Start timer } verificationCodeText.addTextChangedListener(watcher); verificationCodeText.setOnEditorActionListener(new TextView.OnEditorActionListener() { public boolean onEditorAction(final TextView v, final int actionId, final KeyEvent event) { if (actionId == IME_ACTION_DONE && verifyButton.isEnabled()) { verify(); return true; } return false; } }); accountManager = AccountManager.get(this); } @OnClick(R.id.b_verify) public void verify() { if(internetConnectivityChecker.isConnected()) { if (username == null) { username = usernameText.getText().toString().trim(); } handleCodeVerification(); } else { internetConnectivityChecker.showAlert(); } } private TextWatcher validationTextWatcher() { return new TextWatcherAdapter() { public void afterTextChanged(final Editable EditTextBox) { updateUIWithValidation(); } }; } private void updateUIWithValidation() { final boolean populated; if(username == null) { populated = populated(verificationCodeText) && populated(usernameText); } else { populated = populated(verificationCodeText); } verifyButton.setEnabled(populated); } private boolean populated(final EditText editText) { return editText.getText().toString().trim().length() > 0; } // CountDownTimer class public class Timer extends CountDownTimer { public Timer() { super(30000, 1000); //super(startTime, interval); } 
@Override public void onFinish() { countText.setText("30s"); progressBar.setProgress(30); unregisterReceiver(smsReceivingEvent); //end receiver if (smsReceivingEvent.code != null) { //checking smsReceivingEvent get the code or not verificationCodeText.setText(smsReceivingEvent.code); handleCodeVerification(); //verify code } else { verificationCodeText.setVisibility(View.VISIBLE); //user have to enter code verifyButton.setVisibility(View.VISIBLE); smsReceivingLayout.setVisibility(View.GONE); } } @Override public void onTick(long millisUntilFinished) { countText.setText((int)(millisUntilFinished / 1000) + "s"); progressBar.setProgress(30 - (int)(millisUntilFinished / 1000)); } } @OnClick(R.id.b_manually_verify) public void manuallyVerify() { //user have to enter code timer.cancel(); timer.onFinish(); } // verify the verification code private void handleCodeVerification(){ progressDialog = new MaterialDialog.Builder(this) .title(R.string.message_verifying) .content(R.string.please_wait) .widgetColorRes(R.color.primary) .progress(true, 0).show(); new SafeAsyncTask<Boolean>() { public Boolean call() throws Exception { codeResponse = testpressService.verifyCode(username,verificationCodeText.getText().toString().trim()); return true; } @Override protected void onException(final Exception e) throws RuntimeException { progressDialog.dismiss(); // Retrofit Errors are handled if((e instanceof RetrofitError)) { RegistrationErrorDetails registrationErrorDetails = (RegistrationErrorDetails)((RetrofitError) e).getBodyAs(RegistrationErrorDetails.class); if(!registrationErrorDetails.getNonFieldErrors().isEmpty()) { verificationCodeError.setText(registrationErrorDetails.getNonFieldErrors().get(0)); verificationCodeError.setVisibility(View.VISIBLE); verificationCodeText.requestFocus(); } } } @Override public void onSuccess(final Boolean authSuccess) { //Successfully Verified setResult(RESULT_OK); if(password == null){ gotoLoginScreen(); } else { autoLogin(); } } }.execute(); } // 
check password & get authKey private void autoLogin() { in.testpress.models.InstituteSettings settings = new in.testpress.models.InstituteSettings(instituteSettings.getBaseUrl()) .setBookmarksEnabled(instituteSettings.getBookmarksEnabled()) .setCoursesFrontend(instituteSettings.getShowGameFrontend()) .setCoursesGamificationEnabled(instituteSettings.getCoursesEnableGamification()) .setCommentsVotingEnabled(instituteSettings.getCommentsVotingEnabled()).setAccessCodeEnabled(false); TestpressSdk.initialize(this, settings, username, password, TestpressSdk.Provider.TESTPRESS, new TestpressCallback<TestpressSession>() { @Override public void onSuccess(TestpressSession response) { //add account in mobile authToken = response.getToken(); testpressService.setAuthToken(authToken); final Account account = new Account(username, APPLICATION_ID); accountManager.addAccountExplicitly(account, password, null); accountManager.setAuthToken(account, APPLICATION_ID, authToken); updateDevice(); DaoSession daoSession = ((TestpressApplication) getApplicationContext()).getDaoSession(); PostDao postDao = daoSession.getPostDao(); postDao.deleteAll(); daoSession.clear(); Intent intent; switch (getIntent().getExtras().getString(Constants.DEEP_LINK_TO, "")) { case Constants.DEEP_LINK_TO_POST: intent = new Intent(CodeVerificationActivity.this, PostActivity.class); intent.putExtra(Constants.IS_DEEP_LINK, true); intent.putExtras(getIntent().getExtras()); break; default: intent = new Intent(CodeVerificationActivity.this, MainActivity.class); break; } startActivity(intent); finish(); } @Override public void onException(TestpressException e) { gotoLoginScreen(); } }); } private void updateDevice() { final SharedPreferences sharedPreferences = getSharedPreferences(Constants.GCM_PREFERENCE_NAME, Context.MODE_PRIVATE); sharedPreferences.edit().putBoolean(GCMPreference.SENT_TOKEN_TO_SERVER, false).apply(); new SafeAsyncTask<Device>() { @Override public Device call() throws Exception { String token = 
GCMPreference.getRegistrationId(getApplicationContext()); return testpressService.registerDevice(token, Settings.Secure.getString(getContentResolver(), Settings.Secure.ANDROID_ID)); } @Override protected void onException(Exception e) throws RuntimeException { sharedPreferences.edit().putBoolean(GCMPreference.SENT_TOKEN_TO_SERVER, false).apply(); } @Override protected void onSuccess(final Device device) throws Exception { sharedPreferences.edit().putBoolean(GCMPreference.SENT_TOKEN_TO_SERVER, true).apply(); } }.execute(); } private void gotoLoginScreen(){ new MaterialDialog.Builder(context) .title("Code successfully verified") .content("Please login to continue") .neutralText(R.string.ok) .neutralColorRes(R.color.primary) .buttonsGravity(GravityEnum.CENTER) .cancelable(false) .callback(new MaterialDialog.ButtonCallback() { @Override public void onNeutral(MaterialDialog dialog) { Intent intent = new Intent(CodeVerificationActivity.this, MainActivity.class); //call main activity, it will show login screen startActivity(intent); finish(); } }) .show(); } @Override public void onBackPressed() { if(username == null) { //onBackPressed go to login screen only if username is null Intent intent = new Intent(CodeVerificationActivity.this, MainActivity.class); startActivity(intent); finish(); } } private void fetchInstituteSettingLocalDB() { DaoSession daoSession = ((TestpressApplication) getApplicationContext()).getDaoSession(); instituteSettingsDao = daoSession.getInstituteSettingsDao(); List<InstituteSettings> instituteSettingsList = instituteSettingsDao.queryBuilder() .where(InstituteSettingsDao.Properties.BaseUrl.eq(BASE_URL)) .list(); if (instituteSettingsList.size() == 0) { getInstituteSettings(); } else { instituteSettings = instituteSettingsList.get(0); } } private void getInstituteSettings() { progressBar.setVisibility(View.VISIBLE); new SafeAsyncTask<InstituteSettings>() { @Override public InstituteSettings call() throws Exception { return 
testpressService.getInstituteSettings(); } @Override protected void onException(Exception exception) throws RuntimeException { if (exception.getCause() instanceof IOException) { internetConnectivityChecker.showAlert(); } else { internetConnectivityChecker.showAlert(); } progressBar.setVisibility(View.GONE); } @Override protected void onSuccess(InstituteSettings instituteSettings) throws Exception { instituteSettings.setBaseUrl(BASE_URL); instituteSettingsDao.insertOrReplace(instituteSettings); progressBar.setVisibility(View.GONE); } }.execute(); } }
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.domain.scm; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.builder.ConfigurationPropertyBuilder; import com.thoughtworks.go.config.materials.AbstractMaterialConfig; import com.thoughtworks.go.config.remote.ConfigOrigin; import com.thoughtworks.go.config.remote.ConfigOriginTraceable; import com.thoughtworks.go.config.validation.NameTypeValidator; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.config.Configuration; import com.thoughtworks.go.domain.config.ConfigurationProperty; import com.thoughtworks.go.domain.config.ConfigurationValue; import com.thoughtworks.go.domain.config.PluginConfiguration; import com.thoughtworks.go.domain.ConfigurationDisplayUtil; import com.thoughtworks.go.plugin.access.scm.SCMConfiguration; import com.thoughtworks.go.plugin.access.scm.SCMConfigurations; import com.thoughtworks.go.plugin.access.scm.SCMMetadataStore; import com.thoughtworks.go.plugin.api.config.Property; import com.thoughtworks.go.util.CachedDigestUtils; import org.apache.commons.lang3.StringUtils; import javax.annotation.PostConstruct; import java.io.Serializable; import java.util.*; import static java.lang.String.format; import static org.apache.commons.lang3.StringUtils.isBlank; import static 
org.apache.commons.lang3.StringUtils.isEmpty; @ConfigTag("scm") @ConfigReferenceCollection(collectionName = "scms", idFieldName = "id") public class SCM implements Serializable, Validatable, ConfigOriginTraceable { public static final String SCM_ID = "scmId"; public static final String NAME = "name"; public static final String AUTO_UPDATE = "autoUpdate"; public static final String PLUGIN_CONFIGURATION = "pluginConfiguration"; public static final String VALUE_KEY = "value"; public static final String ERRORS_KEY = "errors"; private ConfigErrors errors = new ConfigErrors(); private transient ConfigOrigin origin; @ConfigAttribute(value = "id", allowNull = true) private String id; @ConfigAttribute(value = "name", allowNull = false) private String name; @ConfigAttribute(value = "autoUpdate", optional = true) private boolean autoUpdate = true; @Expose @SerializedName("plugin") @ConfigSubtag private PluginConfiguration pluginConfiguration = new PluginConfiguration(); @Expose @SerializedName("config") @ConfigSubtag private Configuration configuration = new Configuration(); public SCM() { } public SCM(String id, String name) { this.id = id; this.name = name; } public SCM(String id, PluginConfiguration pluginConfiguration, Configuration configuration) { this.id = id; this.pluginConfiguration = pluginConfiguration; this.configuration = configuration; } public String getId() { return id; } //used in erb as it cannot access id attribute as it treats 'id' as keyword public String getSCMId() { return getId(); } public void setId(String id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public boolean isAutoUpdate() { return autoUpdate; } public void setAutoUpdate(boolean autoUpdate) { this.autoUpdate = autoUpdate; } public PluginConfiguration getPluginConfiguration() { return pluginConfiguration; } public void setPluginConfiguration(PluginConfiguration pluginConfiguration) { this.pluginConfiguration = 
pluginConfiguration; }

// NOTE(review): the line above is the tail of a setter whose declaration is outside this view.

/** @return the plugin-specific configuration properties of this SCM. */
public Configuration getConfiguration() {
    return configuration;
}

public void setConfiguration(Configuration configuration) {
    this.configuration = configuration;
}

/**
 * Adds the given properties to this SCM's configuration. Properties declared in the plugin's
 * metadata are rebuilt through {@code ConfigurationPropertyBuilder} (so values marked SECURE by
 * the plugin are handled as secure); properties unknown to the plugin are added verbatim.
 */
public void addConfigurations(List<ConfigurationProperty> configurations) {
    ConfigurationPropertyBuilder builder = new ConfigurationPropertyBuilder();
    for (ConfigurationProperty property : configurations) {
        SCMConfigurations scmConfigurations = SCMMetadataStore.getInstance().getConfigurationMetadata(getPluginId());
        if (isValidPluginConfiguration(property.getConfigKeyName(), scmConfigurations)) {
            configuration.add(builder.create(property.getConfigKeyName(), property.getConfigValue(), property.getEncryptedValue(),
                    scmConfigurationFor(property.getConfigKeyName(), scmConfigurations).getOption(SCMConfiguration.SECURE)));
        } else {
            configuration.add(property);
        }
    }
}

// A key is "valid" only when the plugin is installed and its metadata declares that key.
private boolean isValidPluginConfiguration(String configKey, SCMConfigurations scmConfigurations) {
    return doesPluginExist() && scmConfigurationFor(configKey, scmConfigurations) != null;
}

private SCMConfiguration scmConfigurationFor(String configKey, SCMConfigurations scmConfigurations) {
    return scmConfigurations.get(configKey);
}

@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }

    SCM that = (SCM) o;

    if (id != null ? !id.equals(that.id) : that.id != null) {
        return false;
    }
    if (name != null ? !name.equals(that.name) : that.name != null) {
        return false;
    }
    if (pluginConfiguration != null ? !pluginConfiguration.equals(that.pluginConfiguration) : that.pluginConfiguration != null) {
        return false;
    }
    if (configuration != null ? !configuration.equals(that.configuration) : that.configuration != null) {
        return false;
    }

    return true;
}

@Override
public int hashCode() {
    int result = id != null ? id.hashCode() : 0;
    result = 31 * result + (name != null ? name.hashCode() : 0);
    result = 31 * result + (pluginConfiguration != null ? pluginConfiguration.hashCode() : 0);
    result = 31 * result + (configuration != null ? configuration.hashCode() : 0);
    return result;
}

@Override
public void validate(ValidationContext validationContext) {
    if (isBlank(name)) {
        errors().add(NAME, "Please provide name");
    } else if (new NameTypeValidator().isNameInvalid(name)) {
        errors().add(NAME, NameTypeValidator.errorMessage("SCM", name));
    }
    configuration.validateTree();
    // Duplicate configuration keys are reported against this SCM by name.
    configuration.validateUniqueness(String.format("SCM '%s'", name));
}

@Override
public ConfigErrors errors() {
    return errors;
}

@Override
public void addError(String fieldName, String message) {
    errors.add(fieldName, message);
}

/**
 * @return config-key -> {value[, errors]} map for rendering; the errors entry is present only
 * when the property has validation errors.
 */
public Map<String, Map<String, String>> getConfigAsMap() {
    Map<String, Map<String, String>> configMap = new HashMap<>();
    for (ConfigurationProperty property : configuration) {
        Map<String, String> mapValue = new HashMap<>();
        mapValue.put(VALUE_KEY, property.getValue());
        if (!property.errors().isEmpty()) {
            mapValue.put(ERRORS_KEY, StringUtils.join(property.errors().getAll(), ", "));
        }
        configMap.put(property.getConfigKeyName(), mapValue);
    }
    return configMap;
}

// Human-readable rendering of the configuration, prefixed with a warning if the plugin is absent.
public String getConfigForDisplay() {
    String pluginId = getPluginId();
    SCMMetadataStore metadataStore = SCMMetadataStore.getInstance();
    List<ConfigurationProperty> propertiesToBeUsedForDisplay = ConfigurationDisplayUtil.getConfigurationPropertiesToBeUsedForDisplay(metadataStore, pluginId, configuration);

    String prefix = metadataStore.hasPlugin(pluginId) ? "" : "WARNING! Plugin missing. ";
    return prefix + configuration.forDisplay(propertiesToBeUsedForDisplay);
}

private String getPluginId() {
    return pluginConfiguration.getId();
}

public Boolean doesPluginExist(){
    return SCMMetadataStore.getInstance().hasPlugin(getPluginId());
}

// Runs after construction/deserialization: flags each property secure per the plugin metadata.
@PostConstruct
public void applyPluginMetadata() {
    String pluginId = getPluginId();
    for (ConfigurationProperty configurationProperty : configuration) {
        SCMMetadataStore scmMetadataStore = SCMMetadataStore.getInstance();
        if (scmMetadataStore.getConfigurationMetadata(pluginId) != null) {
            boolean isSecureProperty = scmMetadataStore.hasOption(pluginId, configurationProperty.getConfigurationKey().getName(), SCMConfiguration.SECURE);
            configurationProperty.handleSecureValueConfiguration(isSecureProperty);
        }
    }
}

// Populates this SCM from a UI/API attribute map (id, name, auto-update, plugin config, props).
public void setConfigAttributes(Object attributes) {
    Map attributesMap = (Map) attributes;
    if (attributesMap.containsKey(SCM_ID)) {
        id = ((String) attributesMap.get(SCM_ID));
    }
    if (attributesMap.containsKey(NAME)) {
        name = ((String) attributesMap.get(NAME));
    }
    this.setAutoUpdate("true".equals(attributesMap.get(AUTO_UPDATE)));
    if (attributesMap.containsKey(PLUGIN_CONFIGURATION)) {
        pluginConfiguration.setConfigAttributes(attributesMap.get(PLUGIN_CONFIGURATION));
    }
    setPluginConfigurationAttributes(attributesMap);
}

protected void setPluginConfigurationAttributes(Map attributes) {
    SCMConfigurations scmConfigurations = SCMMetadataStore.getInstance().getConfigurationMetadata(pluginConfiguration.getId());
    if (scmConfigurations == null) {
        throw new RuntimeException("metadata unavailable for plugin: " + pluginConfiguration.getId());
    }
    for (SCMConfiguration scmConfiguration : scmConfigurations.list()) {
        String key = scmConfiguration.getKey();
        if (attributes.containsKey(key)) {
            if (configuration.getProperty(key) == null) {
                configuration.addNewConfiguration(scmConfiguration.getKey(), scmConfiguration.getOption(Property.SECURE));
            }
            configuration.getProperty(key).setConfigurationValue(new ConfigurationValue((String) attributes.get(key)));
            configuration.getProperty(key).handleSecureValueConfiguration(scmConfiguration.getOption(Property.SECURE));
        }
    }
}

/** @return a stable SHA-256 fingerprint identifying this material (plugin id + identity props). */
public String getFingerprint() {
    List<String> list = new ArrayList<>();
    list.add(format("%s=%s", "plugin-id", getPluginId()));
    handleSCMProperties(list);
    String fingerprint = StringUtils.join(list, AbstractMaterialConfig.FINGERPRINT_DELIMITER);
    // CAREFUL! the hash algorithm has to be same as the one used in 47_create_new_materials.sql
    return CachedDigestUtils.sha256Hex(fingerprint);
}

private void handleSCMProperties(List<String> list) {
    SCMConfigurations metadata = SCMMetadataStore.getInstance().getConfigurationMetadata(getPluginId());
    for (ConfigurationProperty configurationProperty : configuration) {
        handleProperty(list, metadata, configurationProperty);
    }
}

// A property participates in the fingerprint unless the plugin metadata explicitly excludes it
// (PART_OF_IDENTITY == false); unknown properties always participate.
private void handleProperty(List<String> list, SCMConfigurations metadata, ConfigurationProperty configurationProperty) {
    SCMConfiguration scmConfiguration = null;
    if (metadata != null) {
        scmConfiguration = metadata.get(configurationProperty.getConfigurationKey().getName());
    }
    if (scmConfiguration == null || scmConfiguration.getOption(SCMConfiguration.PART_OF_IDENTITY)) {
        list.add(configurationProperty.forFingerprint());
    }
}

public boolean isNew() {
    return isEmpty(id);
}

public void clearEmptyConfigurations() {
    configuration.clearEmptyConfigurations();
}

public List<ConfigErrors> getAllErrors() {
    return ErrorCollector.getAllErrors(this);
}

// Assigns a random UUID when no id was supplied, so every SCM is addressable.
@PostConstruct
public void ensureIdExists() {
    if (isBlank(getId())) {
        setId(UUID.randomUUID().toString());
    }
}

public String getSCMType() {
    // Characters outside [a-zA-Z0-9_] are not valid in a material type name.
    return "pluggable_material_" + getPluginConfiguration().getId().replaceAll("[^a-zA-Z0-9_]", "_");
}

public boolean isLocal() {
    return this.origin == null || this.origin.isLocal();
}

@Override
public ConfigOrigin getOrigin() {
    return origin;
}

public void setOrigins(ConfigOrigin origin) {
    this.origin = origin;
}
}
/*
 * Copyright (c) 2002-2009 "Neo Technology,"
 *     Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.kernel.impl.nioneo.store;

import java.util.LinkedList;
import java.io.File;
import java.io.FileOutputStream;
import java.io.RandomAccessFile;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

/**
 * This class generates unique ids for a resource type. For example, nodes in a
 * nodes space are connected to each other via relationships. On nodes and
 * relationship one can add properties. We have three different resource types
 * here (nodes, relationships and properties) where each resource needs a unique
 * id to be able to differ resources of the same type from each other. Creating
 * three id generators (one for each resource type) will do the trick.
 * <p>
 * <CODE>IdGenerator</CODE> makes use of so called "defragged" ids. A
 * defragged id is an id that has been in use one or many times but the resource
 * that was using it doesn't exist anymore. This makes it possible to reuse the
 * id and that in turn makes it possible to write a resource store with fixed
 * records and size (you can calculate the position of a record by knowing the
 * id without using indexes or a translation table).
 * <p>
 * The int value returned by the {@link #nextId} may not be the lowest
 * available id but will be one of the defragged ids if such exist or the next
 * new free id that has never been used.
 * <p>
 * The {@link #freeId} will not check if the id passed in to it really is free.
 * Passing a non free id will corrupt the id generator and {@link #nextId}
 * method will eventually return that id.
 * <p>
 * The {@link #close()} method must always be invoked when done using a
 * generator (for this time). Failure to do will render the generator as
 * "sticky" and unusable next time you try to initialize a generator using the
 * same file. Also you can only have one <CODE>IdGenerator</CODE> instance per
 * id generator file at the same time.
 * <p>
 * In case of disk/file I/O failure an <CODE>IOException</CODE> is thrown.
 */
public class IdGeneratorImpl implements IdGenerator
{
    // sticky(byte), nextFreeId(long)
    private static final int HEADER_SIZE = 9;

    // if sticky the id generator wasn't closed properly so it has to be
    // rebuilt (go through the node, relationship, property, rel type etc files)
    private static final byte CLEAN_GENERATOR = (byte) 0;
    private static final byte STICKY_GENERATOR = (byte) 1;

    // highest id that can ever be handed out: 2^32 - 2 (ids are stored in
    // 32-bit record references elsewhere)
    private static final long OVERFLOW_ID = 4294967294l;

    // number of defragged ids to grab from file in batch (also used for write)
    private int grabSize = -1;
    private long nextFreeId = -1;
    // total bytes read from file, used in writeIdBatch() and close()
    private long totalBytesRead = 0;
    // true if more defragged ids can be read from file
    private boolean haveMore = true;
    // marks where this session's released ids will be written
    private long readBlocksTo = HEADER_SIZE;
    // used to calculate number of ids actually in use
    private long defraggedIdCount = -1;

    private final String fileName;
    private FileChannel fileChannel = null;
    // in memory defragged ids read from file (and from freeId)
    private final LinkedList<Long> defragedIdList = new LinkedList<Long>();
    // in memory newly freed defragged ids that haven't been flushed to disk yet
    private final LinkedList<Long> releasedIdList = new LinkedList<Long>();
    // buffer used in readIdBatch()
    private ByteBuffer readBuffer = null;
    // buffer used in writeIdBatch() and close()
    private ByteBuffer writeBuffer = null;

    /**
     * Opens the id generator represented by <CODE>fileName</CODE>. The
     * <CODE>grabSize</CODE> means how many defragged ids we should keep in
     * memory and is also the size (x8) of the two buffers used for reading and
     * writing to the id generator file. The highest returned id will be read
     * from file and if <CODE>grabSize</CODE> number of ids exist they will be
     * read into memory (if less exist all defragged ids will be in memory).
     * <p>
     * If this id generator hasn't been closed properly since the previous
     * session (sticky) an <CODE>IOException</CODE> will be thrown. When this
     * happens one has to rebuild the id generator from the (node/rel/prop)
     * store file.
     *
     * @param fileName
     *            The file name (and path if needed) for the id generator to be
     *            opened
     * @param grabSize
     *            The number of defragged ids to keep in memory
     * @throws IOException
     *             If no such file exist or if the id generator is sticky
     */
    public IdGeneratorImpl( String fileName, int grabSize )
    {
        if ( grabSize < 1 )
        {
            throw new IllegalArgumentException( "Illegal grabSize: " + grabSize );
        }
        this.fileName = fileName;
        this.grabSize = grabSize;
        // 8 bytes per stored id (long)
        readBuffer = ByteBuffer.allocate( grabSize * 8 );
        writeBuffer = ByteBuffer.allocate( grabSize * 8 );
        initGenerator();
    }

    /**
     * Returns the next "free" id. If a defragged id exist it will be returned
     * else the next free id that hasn't been used yet is returned. If no id
     * exist the capacity is exceeded (all int values >= 0 are taken) and a
     * <CODE>IOException</CODE> will be thrown.
     *
     * @return The next free id
     * @throws IOException
     *             If the capacity is exceeded or closed generator
     */
    public synchronized long nextId()
    {
        if ( fileChannel == null )
        {
            throw new IllegalStateException( "Closed id generator " + fileName );
        }
        if ( defragedIdList.size() > 0 )
        {
            long id = defragedIdList.removeFirst();
            // refill the in-memory batch from file once it runs dry
            if ( haveMore && defragedIdList.size() == 0 )
            {
                readIdBatch();
            }
            defraggedIdCount--;
            return id;
        }
        if ( nextFreeId >= OVERFLOW_ID || nextFreeId < 0 )
        {
            throw new UnderlyingStorageException( "Id capacity exceeded" );
        }
        return nextFreeId++;
    }

    /**
     * Sets the next free "high" id. This method should be called when an id
     * generator has been rebuilt.
     *
     * @param id
     *            The next free id
     */
    public synchronized void setHighId( long id )
    {
        nextFreeId = id;
    }

    /**
     * Returns the next "high" id that will be returned if no defragged ids
     * exist.
     *
     * @return The next free "high" id
     */
    public synchronized long getHighId()
    {
        return nextFreeId;
    }

    /**
     * Frees the <CODE>id</CODE> making it a defragged id that will be
     * returned by next id before any new id (that hasn't been used yet) is
     * returned.
     * <p>
     * This method will throw an <CODE>IOException</CODE> if id is negative or
     * if id is greater than the highest returned id. However as stated in the
     * class documentation above the id isn't validated to see if it really is
     * free.
     *
     * @param id
     *            The id to be made available again
     * @throws IOException
     *             If id is negative or greater than the highest returned id
     */
    public synchronized void freeId( long id )
    {
        if ( id < 0 || id >= nextFreeId )
        {
            throw new IllegalArgumentException( "Illegal id[" + id + "]" );
        }
        if ( fileChannel == null )
        {
            throw new IllegalStateException( "Generator closed " + fileName );
        }
        releasedIdList.add( id );
        defraggedIdCount++;
        // spill to disk once a full batch has accumulated
        if ( releasedIdList.size() >= grabSize )
        {
            writeIdBatch();
        }
    }

    /**
     * Closes the id generator flushing defragged ids in memory to file. The
     * file will be truncated to the minimal size required to hold all defragged
     * ids and it will be marked as clean (not sticky).
     * <p>
     * An invoke to the <CODE>nextId</CODE> or <CODE>freeId</CODE> after
     * this method has been invoked will result in an <CODE>IOException</CODE>
     * since the highest returned id has been set to a negative value.
     *
     * @throws IOException
     *             If unable to close this id generator
     */
    public synchronized void close()
    {
        // already closed (or never successfully initialized)
        if ( nextFreeId == -1 )
        {
            return;
        }

        // write out lists
        if ( releasedIdList.size() > 0 )
        {
            writeIdBatch();
        }
        if ( defragedIdList.size() > 0 )
        {
            while ( defragedIdList.size() > 0 )
            {
                releasedIdList.add( defragedIdList.removeFirst() );
            }
            writeIdBatch();
        }

        // write header
        try
        {
            fileChannel.position( 0 );
            ByteBuffer buffer = ByteBuffer.allocate( HEADER_SIZE );
            // header stays STICKY until the compaction below has succeeded
            buffer.put( STICKY_GENERATOR ).putLong( nextFreeId );
            buffer.flip();
            fileChannel.write( buffer );
            // move data to remove fragmentation in file
            if ( totalBytesRead > HEADER_SIZE )
            {
                long writePosition = HEADER_SIZE;
                long readPosition = readBlocksTo;
                if ( totalBytesRead < readBlocksTo )
                {
                    readPosition = totalBytesRead;
                }
                int bytesRead = -1;
                do
                {
                    writeBuffer.clear();
                    fileChannel.position( readPosition );
                    bytesRead = fileChannel.read( writeBuffer );
                    readPosition += bytesRead;
                    writeBuffer.flip();
                    fileChannel.position( writePosition );
                    writePosition += fileChannel.write( writeBuffer );
                }
                while ( bytesRead > 0 );
                // truncate
                fileChannel.truncate( writePosition );
            }
            // flush
            fileChannel.force( false );
            // remove sticky
            buffer.clear();
            buffer.put( CLEAN_GENERATOR );
            buffer.limit( 1 );
            buffer.flip();
            fileChannel.position( 0 );
            fileChannel.write( buffer );
            // flush and close
            fileChannel.force( false );
            fileChannel.close();
            fileChannel = null;
            // make this generator unusable
            nextFreeId = -1;
        }
        catch ( IOException e )
        {
            throw new UnderlyingStorageException(
                "Unable to close id generator " + fileName, e );
        }
    }

    /**
     * Returns the file associated with this id generator.
     *
     * @return The id generator's file name
     */
    public String getFileName()
    {
        return this.fileName;
    }

    /**
     * Creates a new id generator.
     *
     * @param fileName
     *            The name of the id generator
     * @throws IOException
     *             If unable to create the id generator
     */
    public static void createGenerator( String fileName )
    {
        // sanity checks
        if ( fileName == null )
        {
            throw new IllegalArgumentException( "Null filename" );
        }
        File file = new File( fileName );
        if ( file.exists() )
        {
            throw new IllegalStateException( "Can't create IdGeneratorFile["
                + fileName + "], file already exists" );
        }
        try
        {
            FileChannel channel = new FileOutputStream( fileName ).getChannel();
            // write the header: clean marker + high id of 0
            ByteBuffer buffer = ByteBuffer.allocate( HEADER_SIZE );
            buffer.put( CLEAN_GENERATOR ).putLong( 0 ).flip();
            channel.write( buffer );
            channel.force( false );
            channel.close();
        }
        catch ( IOException e )
        {
            throw new UnderlyingStorageException(
                "Unable to create id generator" + fileName, e );
        }
    }

    // initialize the id generator and performs a simple validation
    private synchronized void initGenerator()
    {
        try
        {
            fileChannel = new RandomAccessFile( fileName, "rw" ).getChannel();
            ByteBuffer buffer = ByteBuffer.allocate( HEADER_SIZE );
            totalBytesRead = fileChannel.read( buffer );
            if ( totalBytesRead != HEADER_SIZE )
            {
                fileChannel.close();
                throw new InvalidIdGeneratorException(
                    "Unable to read header, bytes read: " + totalBytesRead );
            }
            buffer.flip();
            byte storageStatus = buffer.get();
            if ( storageStatus != CLEAN_GENERATOR )
            {
                fileChannel.close();
                throw new InvalidIdGeneratorException( "Sticky generator[ "
                    + fileName + "] delete this id generator and build a new one" );
            }
            this.nextFreeId = buffer.getLong();
            // mark the file sticky while this session holds it open; close()
            // clears the flag again
            buffer.flip();
            buffer.put( STICKY_GENERATOR ).limit( 1 ).flip();
            fileChannel.position( 0 );
            fileChannel.write( buffer );
            fileChannel.position( HEADER_SIZE );
            readBlocksTo = fileChannel.size();
            // everything after the header is defragged ids, 8 bytes each
            defraggedIdCount = (int) (readBlocksTo - HEADER_SIZE) / 8;
            readIdBatch();
        }
        catch ( IOException e )
        {
            throw new UnderlyingStorageException(
                "Unable to init id generator " + fileName, e );
        }
    }

    private void readIdBatch()
    {
        if ( !haveMore )
        {
            return;
        }
        if ( totalBytesRead >= readBlocksTo )
        {
            haveMore = false;
            return;
        }
        try
        {
            // clamp the read so we never consume ids written by this session
            if ( totalBytesRead + readBuffer.capacity() > readBlocksTo )
            {
                readBuffer.clear();
                readBuffer
                    .limit( (int) (readBlocksTo - fileChannel.position()) );
            }
            else
            {
                readBuffer.clear();
            }
            fileChannel.position( totalBytesRead );
            int bytesRead = fileChannel.read( readBuffer );
            assert fileChannel.position() <= readBlocksTo;
            totalBytesRead += bytesRead;
            readBuffer.flip();
            assert (bytesRead % 8) == 0;
            int idsRead = bytesRead / 8;
            defraggedIdCount -= idsRead;
            for ( int i = 0; i < idsRead; i++ )
            {
                long id = readBuffer.getLong();
                defragedIdList.add( id );
            }
        }
        catch ( IOException e )
        {
            throw new UnderlyingStorageException(
                "Failed reading defragged id batch", e );
        }
    }

    // writes a batch of defragged ids to file
    private void writeIdBatch()
    {
        // position at end
        try
        {
            fileChannel.position( fileChannel.size() );
            writeBuffer.clear();
            while ( releasedIdList.size() > 0 )
            {
                writeBuffer.putLong( releasedIdList.removeFirst() );
                if ( writeBuffer.position() == writeBuffer.capacity() )
                {
                    writeBuffer.flip();
                    fileChannel.write( writeBuffer );
                    writeBuffer.clear();
                }
            }
            writeBuffer.flip();
            fileChannel.write( writeBuffer );
            // position for next readIdBatch
            fileChannel.position( totalBytesRead );
        }
        catch ( IOException e )
        {
            throw new UnderlyingStorageException(
                "Unable to write defragged id " + " batch", e );
        }
    }

    /**
     * Utility method that will dump all defragged id's and the "high id" to
     * console. Do not call while running store using this id generator since it
     * could corrupt the id generator (not thread safe). This method will close
     * the id generator after being invoked.
     *
     * @throws IOException
     *             If problem dumping free ids
     */
    public synchronized void dumpFreeIds()
    {
        while ( haveMore )
        {
            readIdBatch();
        }
        java.util.Iterator<Long> itr = defragedIdList.iterator();
        while ( itr.hasNext() )
        {
            System.out.print( " " + itr.next() );
        }
        System.out.println( "\nNext free id: " + nextFreeId );
        close();
    }

    public synchronized long getNumberOfIdsInUse()
    {
        return nextFreeId - defraggedIdCount;
    }
}
import engine.Game; import engine.geometry.Geometry; import engine.input.Keyboard; import engine.input.Mouse; import engine.material.Material; import engine.material.PhongMaterial; import engine.object3d.Mesh; import engine.object3d.Object3d; import engine.object3d.Scene; import engine.object3d.camera.Camera; import engine.object3d.camera.PerspectiveCamera; import engine.object3d.light.Light; import engine.util.Color; import engine.util.Draw3dUtils; import org.joml.Vector3f; import static org.lwjgl.glfw.GLFW.*; public class Demo extends Game { private Scene scene; private PerspectiveCamera camera; private Keyboard keyboard; private Mouse mouse; private Vector3f cameraMotion; Object3d object1; Mesh mesh1; Mesh mesh2; private int width; private int height; private boolean shouldClose = false; public Demo() { this(DEFAULT_WIDTH, DEFAULT_HEIGHT); } public Demo(int width, int height) { this.width = width; this.height = height; } @Override public void init() { scene = new Scene(); camera = new PerspectiveCamera(75, (float)(width)/(height), 0.01f, 10000); keyboard = new Keyboard(); mouse = new Mouse(); camera.moveForward(-10); camera.moveUp(1); Material basicMaterial = new Material(); //80 realistic //0.1f use for color mixing Material phongMaterial = new PhongMaterial(Color.WHITE, 80); Geometry mesh1Geo = Draw3dUtils.cubeGeometry(1, 1, 1, Color.RED); Geometry mesh2Geo = Draw3dUtils.cubeGeometry(0.5f, 2, 0.5f, Color.BLUE); Geometry lightBallGeo = Draw3dUtils.sphereGeometry(0.1f, 10, 10, Color.WHITE); Geometry gridGeo = Draw3dUtils.gridHelper(10, 1); Geometry axisGeo = Draw3dUtils.axisHelper(1); Geometry wallGeo = Draw3dUtils.cubeGeometry(10, 10, 0.5f, new Color(1, 1, 1)); Mesh gridMesh = new Mesh(gridGeo, basicMaterial); Mesh axisMesh = new Mesh(axisGeo, basicMaterial); scene.add(gridMesh); scene.add(axisMesh); mesh1 = new Mesh(mesh1Geo, phongMaterial); mesh2 = new Mesh(mesh2Geo, phongMaterial); Mesh lightBall = new Mesh(lightBallGeo, basicMaterial); Mesh wallMesh = new 
Mesh(wallGeo, phongMaterial); Mesh wallMesh2 = new Mesh(wallGeo, phongMaterial); wallMesh.rotateY(90, true); wallMesh.translate(new Vector3f(5, 0, 0)); wallMesh2.translate(new Vector3f(0, 0, -5)); scene.add(wallMesh); scene.add(wallMesh2); object1 = new Object3d(); scene.add(object1); object1.addChild(mesh1); mesh1.translate(new Vector3f(0, 1, 0)); mesh1.addChild(mesh2); mesh2.translate(new Vector3f(2, 0, 0)); float ambient = 0.001f; float attenuation = 0.1f; Light light = new Light(Color.WHITE, attenuation, ambient); light.addChild(lightBall); mesh2.addChild(light); light.moveForward(-1); } @Override public void update(float delta) { // mesh2.moveForward(5f * delta); object1.rotate(new Vector3f(1, 0, 0), 50 * delta); mesh1.rotate(new Vector3f(0, 0, 1), 50 * delta); mesh2.rotate(new Vector3f(0, 1, 0), 50 * delta); updateCamera(delta); } private void updateCamera(float delta) { float moveAmt = 5f * delta; float rotationMultiplier = 35f * delta; float xRotAmt; float yRotAmt; cameraMotion = new Vector3f(); if (keyboard.isKeyDown(GLFW_KEY_ESCAPE)) { shouldClose = true; } if (keyboard.isKeyDown(GLFW_KEY_W)) { cameraMotion.add(new Vector3f(0, 0, -moveAmt)); } if (keyboard.isKeyDown(GLFW_KEY_S)) { cameraMotion.add(new Vector3f(0, 0, moveAmt)); } if (keyboard.isKeyDown(GLFW_KEY_A)) { cameraMotion.add(new Vector3f(-moveAmt, 0, 0)); } if (keyboard.isKeyDown(GLFW_KEY_D)) { cameraMotion.add(new Vector3f(moveAmt, 0, 0)); } if (keyboard.isKeyDown(GLFW_KEY_SPACE)) { cameraMotion.add(new Vector3f(0, moveAmt, 0)); } if (keyboard.isKeyDown(GLFW_KEY_LEFT_SHIFT)) { cameraMotion.add(new Vector3f(0, -moveAmt, 0)); } camera.translateRelativeToRotation(cameraMotion); if (mouse.isButtonDown(GLFW_MOUSE_BUTTON_2)) { xRotAmt= (float) mouse.getDeltaX() * rotationMultiplier; yRotAmt= (float) mouse.getDeltaY() * rotationMultiplier; camera.rotateY(-xRotAmt, true); camera.rotateX(-yRotAmt); } } @Override public Scene getScene() { return scene; } @Override public Camera getCamera() { return camera; 
} @Override public Keyboard getKeyboard() { return keyboard; } @Override public Mouse getMouse() { return mouse; } @Override public int getWidth() { return DEFAULT_WIDTH; } @Override public int getHeight() { return DEFAULT_HEIGHT; } @Override public int getTargetFps() { return DEFAULT_TARGET_FPS; } @Override public int getTargetUps() { return DEFAULT_TARGET_UPS; } @Override public boolean shouldClose() { return shouldClose; } }
/**
 * Copyright (c) 2012, Ben Fortuna
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  o Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 *  o Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 *  o Neither the name of Ben Fortuna nor the names of any other contributors
 * may be used to endorse or promote products derived from this software
 * without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package net.fortuna.ical4j.model.component;

import java.util.HashMap;
import java.util.Map;

import net.fortuna.ical4j.model.Date;
import net.fortuna.ical4j.model.Property;
import net.fortuna.ical4j.model.PropertyList;
import net.fortuna.ical4j.model.ValidationException;
import net.fortuna.ical4j.model.Validator;
import net.fortuna.ical4j.model.property.Clazz;
import net.fortuna.ical4j.model.property.Created;
import net.fortuna.ical4j.model.property.Description;
import net.fortuna.ical4j.model.property.DtStamp;
import net.fortuna.ical4j.model.property.DtStart;
import net.fortuna.ical4j.model.property.LastModified;
import net.fortuna.ical4j.model.property.Method;
import net.fortuna.ical4j.model.property.Organizer;
import net.fortuna.ical4j.model.property.RecurrenceId;
import net.fortuna.ical4j.model.property.Sequence;
import net.fortuna.ical4j.model.property.Status;
import net.fortuna.ical4j.model.property.Summary;
import net.fortuna.ical4j.model.property.Uid;
import net.fortuna.ical4j.model.property.Url;
import net.fortuna.ical4j.util.CompatibilityHints;
import net.fortuna.ical4j.util.PropertyValidator;

/**
 * $Id$ [Apr 5, 2004]
 *
 * Defines an iCalendar VJOURNAL component.
 *
 * <pre>
 *    4.6.3 Journal Component
 *
 *       Component Name: VJOURNAL
 *
 *       Purpose: Provide a grouping of component properties that describe a
 *       journal entry.
 *
 *       Formal Definition: A &quot;VJOURNAL&quot; calendar component is defined by the
 *       following notation:
 *
 *         journalc   = &quot;BEGIN&quot; &quot;:&quot; &quot;VJOURNAL&quot; CRLF
 *                      jourprop
 *                      &quot;END&quot; &quot;:&quot; &quot;VJOURNAL&quot; CRLF
 *
 *         jourprop   = *(
 *
 *                    ; the following are optional,
 *                    ; but MUST NOT occur more than once
 *
 *                    class / created / description / dtstart / dtstamp /
 *                    last-mod / organizer / recurid / seq / status /
 *                    summary / uid / url /
 *
 *                    ; the following are optional,
 *                    ; and MAY occur more than once
 *
 *                    attach / attendee / categories / comment /
 *                    contact / exdate / exrule / related / rdate /
 *                    rrule / rstatus / x-prop
 *
 *                    )
 * </pre>
 *
 * Example 1 - Creating a journal associated with an event:
 *
 * <pre><code>
 * DtStart meetingDate = (DtStart) meeting.getProperties().getProperty(
 *         Property.DTSTART);
 *
 * VJournal minutes = new VJournal(meetingDate.getTime(),
 *         &quot;Progress Meeting - Minutes&quot;);
 *
 * // add timezone information..
 * TzId tzParam = meetingDate.getParameters().getParameter(Parameter.TZID);
 * minutes.getProperties().getProperty(Property.DTSTART).getParameters().add(
 *         tzParam);
 *
 * // add description..
 * minutes.getProperties().add(new Description(&quot;1. Agenda.., 2. Action Items..&quot;));
 * </code></pre>
 *
 * @author Ben Fortuna
 */
public class VJournal extends CalendarComponent {

    private static final long serialVersionUID = -7635140949183238830L;

    // iTIP METHOD -> validator for this component under that method
    private final Map methodValidators = new HashMap();
    {
        methodValidators.put(Method.ADD, new AddValidator());
        methodValidators.put(Method.CANCEL, new CancelValidator());
        methodValidators.put(Method.PUBLISH, new PublishValidator());
    }

    /**
     * Default constructor.
     */
    public VJournal() {
        super(VJOURNAL);
        getProperties().add(new DtStamp());
    }

    /**
     * Constructor.
     * @param properties a list of properties
     */
    public VJournal(final PropertyList properties) {
        super(VJOURNAL, properties);
    }

    /**
     * Constructs a new VJOURNAL instance associated with the specified time with the specified summary.
     * @param start the date the journal entry is associated with
     * @param summary the journal summary
     */
    public VJournal(final Date start, final String summary) {
        this();
        getProperties().add(new DtStart(start));
        getProperties().add(new Summary(summary));
    }

    /**
     * {@inheritDoc}
     */
    public final void validate(final boolean recurse)
            throws ValidationException {

        // UID and DTSTAMP are mandatory unless relaxed validation is enabled.
        if (!CompatibilityHints
                .isHintEnabled(CompatibilityHints.KEY_RELAXED_VALIDATION)) {

            // From "4.8.4.7 Unique Identifier":
            // Conformance: The property MUST be specified in the "VEVENT", "VTODO",
            // "VJOURNAL" or "VFREEBUSY" calendar components.
            PropertyValidator.getInstance().assertOne(Property.UID,
                    getProperties());

            // From "4.8.7.2 Date/Time Stamp":
            // Conformance: This property MUST be included in the "VEVENT", "VTODO",
            // "VJOURNAL" or "VFREEBUSY" calendar components.
            PropertyValidator.getInstance().assertOne(Property.DTSTAMP,
                    getProperties());
        }

        /*
         * ; the following are optional, ; but MUST NOT occur more than once class / created / description / dtstart /
         * dtstamp / last-mod / organizer / recurid / seq / status / summary / uid / url /
         */
        PropertyValidator.getInstance().assertOneOrLess(Property.CLASS,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.CREATED,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DTSTART,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DTSTAMP,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.ORGANIZER,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.RECURRENCE_ID,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.SEQUENCE,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.STATUS,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.UID,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.URL,
                getProperties());

        // Only DRAFT/FINAL/CANCELLED are legal STATUS values for a journal.
        final Status status = (Status) getProperty(Property.STATUS);
        if (status != null && !Status.VJOURNAL_DRAFT.getValue().equals(status.getValue())
                && !Status.VJOURNAL_FINAL.getValue().equals(status.getValue())
                && !Status.VJOURNAL_CANCELLED.getValue().equals(status.getValue())) {
            throw new ValidationException("Status property ["
                    + status.toString() + "] may not occur in VJOURNAL");
        }

        /*
         * ; the following are optional, ; and MAY occur more than once attach / attendee / categories / comment /
         * contact / exdate / exrule / related / rdate / rrule / rstatus / x-prop
         */

        if (recurse) {
            validateProperties();
        }
    }

    /**
     * {@inheritDoc}
     */
    protected Validator getValidator(Method method) {
        return (Validator) methodValidators.get(method);
    }

    /**
     * <pre>
     * Component/Property  Presence
     * ------------------- ----------------------------------------------
     * METHOD               1      MUST be "ADD"
     * VJOURNAL             1
     *     DESCRIPTION      1      Can be null.
     *     DTSTAMP          1
     *     DTSTART          1
     *     ORGANIZER        1
     *     SEQUENCE         1      MUST be greater than 0
     *     UID              1      MUST match that of the original journal
     *
     *     ATTACH           0+
     *     CATEGORIES       0 or 1 This property MAY contain a list of values
     *     CLASS            0 or 1
     *     COMMENT          0 or 1
     *     CONTACT          0+
     *     CREATED          0 or 1
     *     EXDATE           0+
     *     EXRULE           0+
     *     LAST-MODIFIED    0 or 1
     *     RDATE            0+
     *     RELATED-TO       0+
     *     RRULE            0+
     *     STATUS           0 or 1 MAY be one of DRAFT/FINAL/CANCELLED
     *     SUMMARY          0 or 1 Can be null
     *     URL              0 or 1
     *     X-PROPERTY       0+
     *
     *     ATTENDEE         0
     *     RECURRENCE-ID    0
     *
     * VALARM               0+
     * VTIMEZONE            0 or 1 MUST be present if any date/time refers to
     *                             a timezone
     * X-COMPONENT          0+
     *
     * VEVENT               0
     * VFREEBUSY            0
     * VTODO                0
     * </pre>
     *
     */
    private class AddValidator implements Validator {

        private static final long serialVersionUID = 1L;

        public void validate() throws ValidationException {
            PropertyValidator.getInstance().assertOne(Property.DESCRIPTION, getProperties());
            PropertyValidator.getInstance().assertOne(Property.DTSTAMP, getProperties());
            PropertyValidator.getInstance().assertOne(Property.DTSTART, getProperties());
            PropertyValidator.getInstance().assertOne(Property.ORGANIZER, getProperties());
            PropertyValidator.getInstance().assertOne(Property.SEQUENCE, getProperties());
            PropertyValidator.getInstance().assertOne(Property.UID, getProperties());

            PropertyValidator.getInstance().assertOneOrLess(Property.CATEGORIES, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CLASS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CREATED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.STATUS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.URL, getProperties());

            PropertyValidator.getInstance().assertNone(Property.ATTENDEE, getProperties());
            PropertyValidator.getInstance().assertNone(Property.RECURRENCE_ID, getProperties());
        }
    }

    /**
     * <pre>
     * Component/Property   Presence
     * ------------------- ---------------------------------------------
     * METHOD               1       MUST be "CANCEL"
     * VJOURNAL             1+      All MUST have the same UID
     *     DTSTAMP          1
     *     ORGANIZER        1
     *     SEQUENCE         1
     *     UID              1       MUST be the UID of the original REQUEST
     *
     *     ATTACH           0+
     *     ATTENDEE         0+
     *     CATEGORIES       0 or 1  This property MAY contain a list of values
     *     CLASS            0 or 1
     *     COMMENT          0 or 1
     *     CONTACT          0+
     *     CREATED          0 or 1
     *     DESCRIPTION      0 or 1
     *     DTSTART          0 or 1
     *     EXDATE           0+
     *     EXRULE           0+
     *     LAST-MODIFIED    0 or 1
     *     RDATE            0+
     *     RECURRENCE-ID    0 or 1  only if referring to an instance of a
     *                              recurring calendar component.  Otherwise
     *                              it MUST NOT be present.
     *     RELATED-TO       0+
     *     RRULE            0+
     *     STATUS           0 or 1  MAY be present, must be "CANCELLED" if
     *                              present
     *     SUMMARY          0 or 1
     *     URL              0 or 1
     *     X-PROPERTY       0+
     *
     *     REQUEST-STATUS   0
     *
     * VTIMEZONE            0+      MUST be present if any date/time refers to
     *                              a timezone
     * X-COMPONENT          0+
     * VALARM               0
     * VEVENT               0
     * VFREEBUSY            0
     * VTODO                0
     * </pre>
     *
     */
    private class CancelValidator implements Validator {

        private static final long serialVersionUID = 1L;

        public void validate() throws ValidationException {
            PropertyValidator.getInstance().assertOne(Property.DTSTAMP, getProperties());
            PropertyValidator.getInstance().assertOne(Property.ORGANIZER, getProperties());
            PropertyValidator.getInstance().assertOne(Property.SEQUENCE, getProperties());
            PropertyValidator.getInstance().assertOne(Property.UID, getProperties());

            PropertyValidator.getInstance().assertOneOrLess(Property.CATEGORIES, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CLASS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CREATED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.DTSTART, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.RECURRENCE_ID, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.STATUS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.URL, getProperties());

            PropertyValidator.getInstance().assertNone(Property.REQUEST_STATUS, getProperties());
        }
    }

    /**
     * <pre>
     * Component/Property  Presence
     * ------------------- ----------------------------------------------
     * METHOD               1      MUST be "PUBLISH"
     * VJOURNAL             1+
     *     DESCRIPTION      1      Can be null.
     *     DTSTAMP          1
     *     DTSTART          1
     *     ORGANIZER        1
     *     UID              1
     *
     *     ATTACH           0+
     *     CATEGORIES       0 or 1 This property MAY contain a list of values
     *     CLASS            0 or 1
     *     COMMENT          0 or 1
     *     CONTACT          0+
     *     CREATED          0 or 1
     *     EXDATE           0+
     *     EXRULE           0+
     *     LAST-MODIFIED    0 or 1
     *     RDATE            0+
     *     RECURRENCE-ID    0 or 1 MUST only if referring to an instance of a
     *                             recurring calendar component.  Otherwise
     *                             it MUST NOT be present.
     *     RELATED-TO       0+
     *     RRULE            0+
     *     SEQUENCE         0 or 1 MUST echo the original SEQUENCE number.
     *                             MUST be present if non-zero. MAY be
     *                             present if zero.
     *     STATUS           0 or 1 MAY be one of DRAFT/FINAL/CANCELLED
     *     SUMMARY          0 or 1 Can be null
     *     URL              0 or 1
     *     X-PROPERTY       0+
     *
     *     ATTENDEE         0
     *
     * VALARM               0+
     * VTIMEZONE            0+     MUST be present if any date/time refers to
     *                             a timezone
     * X-COMPONENT          0+
     *
     * VEVENT               0
     * VFREEBUSY            0
     * VTODO                0
     * </pre>
     *
     */
    private class PublishValidator implements Validator {

        private static final long serialVersionUID = 1L;

        public void validate() throws ValidationException {
            PropertyValidator.getInstance().assertOne(Property.DESCRIPTION, getProperties());
            PropertyValidator.getInstance().assertOne(Property.DTSTAMP, getProperties());
            PropertyValidator.getInstance().assertOne(Property.DTSTART, getProperties());
            PropertyValidator.getInstance().assertOne(Property.ORGANIZER, getProperties());
            PropertyValidator.getInstance().assertOne(Property.UID, getProperties());

            PropertyValidator.getInstance().assertOneOrLess(Property.CATEGORIES, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CLASS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.CREATED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.RECURRENCE_ID, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.SEQUENCE, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.STATUS, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY, getProperties());
            PropertyValidator.getInstance().assertOneOrLess(Property.URL, getProperties());

            PropertyValidator.getInstance().assertNone(Property.ATTENDEE, getProperties());
        }
    }

    /**
     * @return the optional access classification property for a journal entry
     */
    public final Clazz getClassification() {
        return (Clazz) getProperty(Property.CLASS);
    }

    /**
     * @return the optional creation-time property for a journal entry
     */
    public final Created getCreated() {
        return (Created) getProperty(Property.CREATED);
} /** * @return the optional description property for a journal entry */ public final Description getDescription() { return (Description) getProperty(Property.DESCRIPTION); } /** * Convenience method to pull the DTSTART out of the property list. * @return The DtStart object representation of the start Date */ public final DtStart getStartDate() { return (DtStart) getProperty(Property.DTSTART); } /** * @return the optional last-modified property for a journal entry */ public final LastModified getLastModified() { return (LastModified) getProperty(Property.LAST_MODIFIED); } /** * @return the optional organizer property for a journal entry */ public final Organizer getOrganizer() { return (Organizer) getProperty(Property.ORGANIZER); } /** * @return the optional date-stamp property */ public final DtStamp getDateStamp() { return (DtStamp) getProperty(Property.DTSTAMP); } /** * @return the optional sequence number property for a journal entry */ public final Sequence getSequence() { return (Sequence) getProperty(Property.SEQUENCE); } /** * @return the optional status property for a journal entry */ public final Status getStatus() { return (Status) getProperty(Property.STATUS); } /** * @return the optional summary property for a journal entry */ public final Summary getSummary() { return (Summary) getProperty(Property.SUMMARY); } /** * @return the optional URL property for a journal entry */ public final Url getUrl() { return (Url) getProperty(Property.URL); } /** * @return the optional recurrence identifier property for a journal entry */ public final RecurrenceId getRecurrenceId() { return (RecurrenceId) getProperty(Property.RECURRENCE_ID); } /** * Returns the UID property of this component if available. * @return a Uid instance, or null if no UID property exists */ public final Uid getUid() { return (Uid) getProperty(Property.UID); } }
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.nopy.html.elements.semantics;

import com.nopy.html.attributes.Angular;
import com.nopy.html.attributes.Element;
import com.nopy.html.attributes.Global;

/**
 * Fluent builder for an HTML {@code <abbr>} element.
 *
 * <p>Every setter delegates to one of the three attribute holders (element
 * content, global HTML attributes, AngularJS directives) and returns
 * {@code this}, so calls can be chained. The final markup is produced by
 * {@link #toString()}.</p>
 *
 * @author RODRIGO
 */
public class Abbr {

    // Element content placed between the opening and closing tags.
    private final Element element;
    // Standard global HTML attributes (id, class, style, ...).
    private final Global global;
    // AngularJS directive attributes (ng-app, ng-bind, ...).
    private final Angular angular;

    /**
     * Creates an empty {@code <abbr>} builder with no content or attributes.
     */
    public Abbr() {
        this.element = new Element();
        this.global = new Global();
        this.angular = new Angular();
    }

    /**
     * Sets the content rendered inside the element.
     *
     * @param content the tag body (its string form is rendered as-is)
     * @return this builder, for chaining
     */
    public Abbr setElement(Object content) { this.element.setElement(content); return this; }

    /* Attributes */

    /* Global HTML attributes */
    public Abbr setId(Object value) { this.global.setId(value); return this; }
    public Abbr setClass(Object value) { this.global.setClass(value); return this; }
    public Abbr setStyle(Object value) { this.global.setStyle(value); return this; }
    public Abbr setTitle(Object value) { this.global.setTitle(value); return this; }
    public Abbr setAccesskey(Object accesskey) { this.global.setAccesskey(accesskey); return this; }
    public Abbr setContenteditable(Object contenteditable) { this.global.setContenteditable(contenteditable); return this; }
    public Abbr setDir(Object dir) { this.global.setDir(dir); return this; }
    public Abbr setDraggable(Object draggable) { this.global.setDraggable(draggable); return this; }
    public Abbr setDropzone(Object dropzone) { this.global.setDropzone(dropzone); return this; }
    public Abbr setHidden(Object hidden) { this.global.setHidden(hidden); return this; }
    public Abbr setLang(Object lang) { this.global.setLang(lang); return this; }
    public Abbr setSpellcheck(Object spellcheck) { this.global.setSpellcheck(spellcheck); return this; }
    public Abbr setTabindex(Object tabindex) { this.global.setTabindex(tabindex); return this; }
    public Abbr setTranslate(Object translate) { this.global.setTranslate(translate); return this; }

    /* AngularJS directive attributes */
    public Abbr setNgApp(Object ngApp) { this.angular.setNgApp(ngApp); return this; }
    public Abbr setNgBind(Object ngBind) { this.angular.setNgBind(ngBind); return this; }
    public Abbr setNgBindHtml(Object ngBindHtml) { this.angular.setNgBindHtml(ngBindHtml); return this; }
    public Abbr setNgBindTemplate(Object ngBindTemplate) { this.angular.setNgBindTemplate(ngBindTemplate); return this; }
    public Abbr setNgBlur(Object ngBlur) { this.angular.setNgBlur(ngBlur); return this; }
    public Abbr setNgChange(Object ngChange) { this.angular.setNgChange(ngChange); return this; }
    public Abbr setNgChecked(Object ngChecked) { this.angular.setNgChecked(ngChecked); return this; }
    public Abbr setNgClass(Object ngClass) { this.angular.setNgClass(ngClass); return this; }
    public Abbr setNgClassEven(Object ngClassEven) { this.angular.setNgClassEven(ngClassEven); return this; }
    public Abbr setNgClassOdd(Object ngClassOdd) { this.angular.setNgClassOdd(ngClassOdd); return this; }
    public Abbr setNgClick(Object ngClick) { this.angular.setNgClick(ngClick); return this; }
    public Abbr setNgCloak(Object ngCloak) { this.angular.setNgCloak(ngCloak); return this; }
    public Abbr setNgController(Object ngController) { this.angular.setNgController(ngController); return this; }
    public Abbr setNgCopy(Object ngCopy) { this.angular.setNgCopy(ngCopy); return this; }
    public Abbr setNgCsp(Object ngCsp) { this.angular.setNgCsp(ngCsp); return this; }
    public Abbr setNgCut(Object ngCut) { this.angular.setNgCut(ngCut); return this; }
    public Abbr setNgDblclick(Object ngDblclick) { this.angular.setNgDblclick(ngDblclick); return this; }
    // NOTE(review): "Disable" looks like a misspelling of the ng-disabled
    // directive, but it mirrors the same-named method on Angular — renaming
    // here alone would break compilation; fix in Angular first if desired.
    public Abbr setNgDisable(Object ngDisable) { this.angular.setNgDisable(ngDisable); return this; }
    public Abbr setNgFocus(Object ngFocus) { this.angular.setNgFocus(ngFocus); return this; }
    public Abbr setNgForm(Object ngForm) { this.angular.setNgForm(ngForm); return this; }
    public Abbr setNgHide(Object ngHide) { this.angular.setNgHide(ngHide); return this; }
    public Abbr setNgHref(Object ngHref) { this.angular.setNgHref(ngHref); return this; }
    public Abbr setNgIf(Object ngIf) { this.angular.setNgIf(ngIf); return this; }
    public Abbr setNgKeydown(Object ngKeydown) { this.angular.setNgKeydown(ngKeydown); return this; }
    public Abbr setNgKeypress(Object ngKeypress) { this.angular.setNgKeypress(ngKeypress); return this; }
    public Abbr setNgKeyup(Object ngKeyup) { this.angular.setNgKeyup(ngKeyup); return this; }
    public Abbr setNgList(Object ngList) { this.angular.setNgList(ngList); return this; }
    public Abbr setNgModel(Object ngModel) { this.angular.setNgModel(ngModel); return this; }
    public Abbr setNgModelOptions(Object ngModelOptions) { this.angular.setNgModelOptions(ngModelOptions); return this; }
    public Abbr setNgMousedown(Object ngMousedown) { this.angular.setNgMousedown(ngMousedown); return this; }
    public Abbr setNgMouseenter(Object ngMouseenter) { this.angular.setNgMouseenter(ngMouseenter); return this; }
    public Abbr setNgMouseleave(Object ngMouseleave) { this.angular.setNgMouseleave(ngMouseleave); return this; }
    public Abbr setNgMousemove(Object ngMousemove) { this.angular.setNgMousemove(ngMousemove); return this; }
    public Abbr setNgMouseover(Object ngMouseover) { this.angular.setNgMouseover(ngMouseover); return this; }
    public Abbr setNgMouseup(Object ngMouseup) { this.angular.setNgMouseup(ngMouseup); return this; }
    // NOTE(review): "NonBindatable" presumably targets ng-non-bindable;
    // name mirrors Angular's method — confirm before renaming anywhere.
    public Abbr setNgNonBindatable(Object ngNonBindatable) { this.angular.setNgNonBindatable(ngNonBindatable); return this; }
    public Abbr setNgOpen(Object ngOpen) { this.angular.setNgOpen(ngOpen); return this; }
    public Abbr setNgOptions(Object ngOptions) { this.angular.setNgOptions(ngOptions); return this; }
    public Abbr setNgPaste(Object ngPaste) { this.angular.setNgPaste(ngPaste); return this; }
    public Abbr setNgPluralize(Object ngPluralize) { this.angular.setNgPluralize(ngPluralize); return this; }
    public Abbr setNgReadonly(Object ngReadonly) { this.angular.setNgReadonly(ngReadonly); return this; }
    public Abbr setNgRepeat(Object ngRepeat) { this.angular.setNgRepeat(ngRepeat); return this; }
    public Abbr setNgSelected(Object ngSelected) { this.angular.setNgSelected(ngSelected); return this; }
    public Abbr setNgShow(Object ngShow) { this.angular.setNgShow(ngShow); return this; }
    public Abbr setNgSrc(Object ngSrc) { this.angular.setNgSrc(ngSrc); return this; }
    public Abbr setNgSrcset(Object ngSrcset) { this.angular.setNgSrcset(ngSrcset); return this; }
    public Abbr setNgStyle(Object ngStyle) { this.angular.setNgStyle(ngStyle); return this; }
    public Abbr setNgSubmit(Object ngSubmit) { this.angular.setNgSubmit(ngSubmit); return this; }
    public Abbr setNgSwitch(Object ngSwitch) { this.angular.setNgSwitch(ngSwitch); return this; }
    public Abbr setNgTransclude(Object ngTransclude) { this.angular.setNgTransclude(ngTransclude); return this; }
    public Abbr setNgValue(Object ngValue) { this.angular.setNgValue(ngValue); return this; }
    public Abbr setNgView(Object ngView) { this.angular.setNgView(ngView); return this; }

    /* Rendering */

    /**
     * Renders the element as markup: the global and Angular attribute sets
     * are emitted inside the opening tag, the content between the tags.
     *
     * @return the {@code <abbr>...</abbr>} markup string
     */
    @Override
    public String toString() {
        return "<abbr" + this.global + this.angular + ">" + this.element + "</abbr>";
    }
}
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http.multipart;

import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.HttpConstants;
import io.netty.handler.codec.http.HttpRequest;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Default factory giving {@link Attribute} and {@link FileUpload} according to constructor.
 *
 * <p>According to the constructor, {@link Attribute} and {@link FileUpload} can be:</p>
 * <ul>
 * <li>MemoryAttribute, DiskAttribute or MixedAttribute</li>
 * <li>MemoryFileUpload, DiskFileUpload or MixedFileUpload</li>
 * </ul>
 */
public class DefaultHttpDataFactory implements HttpDataFactory {

    /**
     * Proposed default MINSIZE as 16 KB.
     */
    public static final long MINSIZE = 0x4000;

    /**
     * Proposed default MAXSIZE = -1 as UNLIMITED
     */
    public static final long MAXSIZE = -1;

    // When true, data is always created on disk; when false, the checkSize
    // flag decides between pure-memory and mixed (threshold-based) storage.
    private final boolean useDisk;

    private final boolean checkSize;

    // Threshold (bytes) above which mixed data spills to disk.
    private long minSize;

    // Upper bound on any single data item's size; MAXSIZE (-1) = unlimited.
    private long maxSize = MAXSIZE;

    private Charset charset = HttpConstants.DEFAULT_CHARSET;

    /**
     * Keep all {@link HttpData}s until cleaning methods are called.
     * We need to use {@link IdentityHashMap} because different requests may be equal.
     * See {@link DefaultHttpRequest#hashCode} and {@link DefaultHttpRequest#equals}.
     * Similarly, when removing data items, we need to check their identities because
     * different data items may be equal.
     */
    private final Map<HttpRequest, List<HttpData>> requestFileDeleteMap =
            Collections.synchronizedMap(new IdentityHashMap<HttpRequest, List<HttpData>>());

    /**
     * HttpData will be in memory if less than default size (16KB).
     * The type will be Mixed.
     */
    public DefaultHttpDataFactory() {
        useDisk = false;
        checkSize = true;
        minSize = MINSIZE;
    }

    public DefaultHttpDataFactory(Charset charset) {
        this();
        this.charset = charset;
    }

    /**
     * HttpData will be always on Disk if useDisk is True, else always in Memory if False
     */
    public DefaultHttpDataFactory(boolean useDisk) {
        this.useDisk = useDisk;
        checkSize = false;
    }

    public DefaultHttpDataFactory(boolean useDisk, Charset charset) {
        this(useDisk);
        this.charset = charset;
    }

    /**
     * HttpData will be on Disk if the size of the file is greater than minSize, else it
     * will be in memory. The type will be Mixed.
     */
    public DefaultHttpDataFactory(long minSize) {
        useDisk = false;
        checkSize = true;
        this.minSize = minSize;
    }

    public DefaultHttpDataFactory(long minSize, Charset charset) {
        this(minSize);
        this.charset = charset;
    }

    @Override
    public void setMaxLimit(long maxSize) {
        this.maxSize = maxSize;
    }

    /**
     * @return the associated list of {@link HttpData} for the request
     */
    private List<HttpData> getList(HttpRequest request) {
        // NOTE(review): this get/put pair is not atomic even though the map is
        // synchronized; assumes a request's data is produced by one thread —
        // confirm against callers.
        List<HttpData> list = requestFileDeleteMap.get(request);
        if (list == null) {
            list = new ArrayList<HttpData>();
            requestFileDeleteMap.put(request, list);
        }
        return list;
    }

    @Override
    public Attribute createAttribute(HttpRequest request, String name) {
        if (useDisk) {
            Attribute attribute = new DiskAttribute(name, charset);
            attribute.setMaxSize(maxSize);
            // Disk-backed data must be tracked so its file can be deleted later.
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        if (checkSize) {
            Attribute attribute = new MixedAttribute(name, minSize, charset);
            attribute.setMaxSize(maxSize);
            // Mixed data may spill to disk, so track it for cleanup too.
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        // Pure in-memory attributes hold no disk resources: not tracked.
        MemoryAttribute attribute = new MemoryAttribute(name);
        attribute.setMaxSize(maxSize);
        return attribute;
    }

    @Override
    public Attribute createAttribute(HttpRequest request, String name, long definedSize) {
        if (useDisk) {
            Attribute attribute = new DiskAttribute(name, definedSize, charset);
            attribute.setMaxSize(maxSize);
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        if (checkSize) {
            Attribute attribute = new MixedAttribute(name, definedSize, minSize, charset);
            attribute.setMaxSize(maxSize);
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        MemoryAttribute attribute = new MemoryAttribute(name, definedSize);
        attribute.setMaxSize(maxSize);
        return attribute;
    }

    /**
     * Utility method
     */
    private static void checkHttpDataSize(HttpData data) {
        // checkSize throws IOException when the current length exceeds the
        // configured max; surface that to callers as an IllegalArgumentException.
        try {
            data.checkSize(data.length());
        } catch (IOException ignored) {
            throw new IllegalArgumentException("Attribute bigger than maxSize allowed");
        }
    }

    @Override
    public Attribute createAttribute(HttpRequest request, String name, String value) {
        if (useDisk) {
            Attribute attribute;
            try {
                attribute = new DiskAttribute(name, value, charset);
                attribute.setMaxSize(maxSize);
            } catch (IOException e) {
                // revert to Mixed mode
                attribute = new MixedAttribute(name, value, minSize, charset);
                attribute.setMaxSize(maxSize);
            }
            checkHttpDataSize(attribute);
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        if (checkSize) {
            Attribute attribute = new MixedAttribute(name, value, minSize, charset);
            attribute.setMaxSize(maxSize);
            checkHttpDataSize(attribute);
            List<HttpData> list = getList(request);
            list.add(attribute);
            return attribute;
        }
        try {
            MemoryAttribute attribute = new MemoryAttribute(name, value, charset);
            attribute.setMaxSize(maxSize);
            checkHttpDataSize(attribute);
            return attribute;
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public FileUpload createFileUpload(HttpRequest request, String name, String filename,
            String contentType, String contentTransferEncoding, Charset charset,
            long size) {
        if (useDisk) {
            FileUpload fileUpload = new DiskFileUpload(name, filename, contentType,
                    contentTransferEncoding, charset, size);
            fileUpload.setMaxSize(maxSize);
            checkHttpDataSize(fileUpload);
            List<HttpData> list = getList(request);
            list.add(fileUpload);
            return fileUpload;
        }
        if (checkSize) {
            FileUpload fileUpload = new MixedFileUpload(name, filename, contentType,
                    contentTransferEncoding, charset, size, minSize);
            fileUpload.setMaxSize(maxSize);
            checkHttpDataSize(fileUpload);
            List<HttpData> list = getList(request);
            list.add(fileUpload);
            return fileUpload;
        }
        MemoryFileUpload fileUpload = new MemoryFileUpload(name, filename, contentType,
                contentTransferEncoding, charset, size);
        fileUpload.setMaxSize(maxSize);
        checkHttpDataSize(fileUpload);
        return fileUpload;
    }

    @Override
    public void removeHttpDataFromClean(HttpRequest request, InterfaceHttpData data) {
        if (!(data instanceof HttpData)) {
            return;
        }

        // Do not use getList because it adds empty list to requestFileDeleteMap
        // if request is not found
        List<HttpData> list = requestFileDeleteMap.get(request);
        if (list == null) {
            return;
        }

        // Can't simply call list.remove(data), because different data items may be equal.
        // Need to check identity.
        Iterator<HttpData> i = list.iterator();
        while (i.hasNext()) {
            HttpData n = i.next();
            if (n == data) {
                i.remove();

                // Remove empty list to avoid memory leak
                if (list.isEmpty()) {
                    requestFileDeleteMap.remove(request);
                }

                return;
            }
        }
    }

    @Override
    public void cleanRequestHttpData(HttpRequest request) {
        // Releasing each HttpData frees any backing resources it holds.
        List<HttpData> list = requestFileDeleteMap.remove(request);
        if (list != null) {
            for (HttpData data : list) {
                data.release();
            }
        }
    }

    @Override
    public void cleanAllHttpData() {
        Iterator<Entry<HttpRequest, List<HttpData>>> i = requestFileDeleteMap.entrySet().iterator();
        while (i.hasNext()) {
            Entry<HttpRequest, List<HttpData>> e = i.next();

            // Calling i.remove() here will cause "java.lang.IllegalStateException: Entry was removed"
            // at e.getValue() below
            List<HttpData> list = e.getValue();
            for (HttpData data : list) {
                data.release();
            }

            i.remove();
        }
    }

    // Legacy alias for cleanRequestHttpData; kept for API compatibility.
    @Override
    public void cleanRequestHttpDatas(HttpRequest request) {
        cleanRequestHttpData(request);
    }

    // Legacy alias for cleanAllHttpData; kept for API compatibility.
    @Override
    public void cleanAllHttpDatas() {
        cleanAllHttpData();
    }
}
package org.srplib.reflection;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.List;

import org.srplib.contract.Argument;
import org.srplib.contract.Assert;
import org.srplib.support.ExceptionUtils;

/**
 * Simplifies method invocation via reflection.
 *
 * <ul>
 * <li>Encapsulates method signature: class, parameter types. So invoker may be reused to create multiple instances.</li>
 * <li>Wraps checked exception into unchecked.</li>
 * <li>Provides very detailed error message.</li>
 * </ul>
 *
 * <p>Client should specify parameter types for a method invocation.</p>
 *
 * <pre>
 * // Using ReflectionUtils
 * ReflectionUtils.newInstance(Person.class,
 *     new Class[] {String.class, String.class, int.class},
 *     new Object[] {"John", "Smith", 35})
 *
 * // Using ReflectionInvoker
 * Person person = ReflectionInvoker.constructor(Person.class)
 *     .parameters(String.class, String.class, int.class)
 *     .invoke("John", "Smith", 35);
 *
 * // Invoke constructor without parameters
 * ReflectionInvoker.constructor(Person.class).invoke();
 *
 * // Invoke method
 * Person person = ...
 * ReflectionInvoker.method(person, "setName").invoke("James");
 *
 * // Create invoker for multiple invocations
 * ReflectionInvoker invoker = ReflectionInvoker.method(person, "setName");
 * invoker.invoke("John");
 * ...
 * invoker.invoke("James");
 *
 * // Create invoker for multiple constructor invocation
 * ReflectionInvoker invoker = ReflectionInvoker.constructor(Person.class).parameters(String.class, Integer.class);
 *
 * Person john = invoker.invoke("John", 24);
 * Person james = invoker.invoke("James", 33);
 * </pre>
 *
 * @author Anton Pechinsky
 */
public class ReflectionInvoker<T, V> {

    /**
     * A name of constructor method. Used for diagnostic purposes.
     */
    private static final String CONSTRUCTOR_NAME = "constructor";

    // Declaring class for constructor mode, or the class the method is looked up on.
    private Class<T> clazz;

    // Object to invoke methods on; null for constructor mode or static methods.
    private T target;

    // Method name; null means this invoker calls a constructor.
    private String methodName;

    // Formal parameter types of the target method/constructor.
    private Class<?>[] parameters = {};

    private String errorMessagePattern;

    private Object[] errorMessageParameters;

    /**
     * Create constructor invoker.
     *
     * @param clazz Class a class to create instance of
     * @return ReflectionInvoker
     */
    public static <T> ReflectionInvoker<T, T> constructor(Class<T> clazz) {
        Argument.checkNotNull(clazz, "Can't create object with 'null' class!");
        // null methodName marks constructor mode (see isConstructorInvoker()).
        return new ReflectionInvoker<T, T>(clazz, null);
    }

    /**
     * Create method invoker for specified class and method name.
     *
     * <p>This method is used if target object isn't known and should be provided later with {@link #target} method.</p>
     *
     * @param clazz Class a class of target object
     * @param methodName String name of method to invoke
     * @return ReflectionInvoker
     */
    public static <T, V> ReflectionInvoker<T, V> method(Class<T> clazz, String methodName) {
        Argument.checkNotNull(clazz, "Can't create object of 'null' class!");
        return new ReflectionInvoker<T, V>(clazz, methodName);
    }

    /**
     * Create method invoker for specified method of target object.
     *
     * @param target Object the object whose method will be invoked
     * @param methodName String name of method to invoke
     * @return ReflectionInvoker
     */
    @SuppressWarnings("unchecked")
    public static <T, V> ReflectionInvoker<T, V> method(T target, String methodName) {
        ReflectionInvoker<T, V> invoker = method((Class<T>) target.getClass(), methodName);
        invoker.target = target;
        return invoker;
    }

    /**
     * Creates invoker for specified class, method name and parameter types.
     *
     * @param clazz Class a class to create instance of or to look the method up on
     * @param methodName String method name; null for constructor mode
     * @param parameters vararg of formal parameter types
     */
    public ReflectionInvoker(Class<T> clazz, String methodName, Class<?>... parameters) {
        Argument.checkNotNull(clazz, "Can't create object with 'null' class!");
        this.clazz = clazz;
        this.methodName = methodName;
        this.parameters = parameters;
    }

    /**
     * Specify the target object to invoke methods on.
     *
     * @param target Object set target object to invoke methods on
     * @return this
     */
    public ReflectionInvoker<T, V> target(T target) {
        this.target = target;
        return this;
    }

    /**
     * Specify parameter types as a list.
     *
     * @param types List of formal parameter types.
     * @return this
     */
    public ReflectionInvoker<T, V> parameters(List<Class<?>> types) {
        Argument.checkNotNull(types, "Parameter parameters must not be null!");
        // BUG FIX: the previous code cast types.toArray() (an Object[]) to
        // Class[], which always throws ClassCastException at runtime.
        // The typed toArray(T[]) overload returns a genuine Class<?>[].
        parameters(types.toArray(new Class<?>[0]));
        return this;
    }

    /**
     * Specify parameter types as vararg.
     *
     * @param parameters vararg of formal parameter types.
     * @return this
     */
    public ReflectionInvoker<T, V> parameters(Class<?>... parameters) {
        // fail fast checks
        if (isConstructorInvoker()) {
            Argument.checkTrue(ReflectionUtils.hasConstructor(clazz, parameters),
                "No constructor " + ToStringHelper.toString(clazz, null, parameters));
        }
        else {
            Argument.checkTrue(ReflectionUtils.findMethodRecursively(clazz, methodName, parameters) != null,
                "No method " + ToStringHelper.toString(clazz, methodName, parameters));
        }
        this.parameters = parameters;
        return this;
    }

    // Constructor mode is encoded as a null method name.
    private boolean isConstructorInvoker() {
        return methodName == null;
    }

    /**
     * An error message to be added to standard exception message.
     *
     * @param pattern String error message pattern.
     * @param parameters vararg message pattern parameters.
     * @return this
     */
    public ReflectionInvoker<T, V> errorMessage(String pattern, Object... parameters) {
        this.errorMessagePattern = pattern;
        this.errorMessageParameters = parameters;
        return this;
    }

    /**
     * Invokes the configured constructor or method.
     *
     * @param arguments vararg of actual parameters.
     * @return new instance (constructor mode) or method invocation result
     * @throws IllegalArgumentException if number of actual parameters differs from number of formal parameters.
     * @throws ReflectionException if invocation fails
     */
    @SuppressWarnings("unchecked")
    public V invoke(Object... arguments) {
        try {
            if (isConstructorInvoker()) {
                return (V) ReflectionUtils.newInstance(clazz, parameters, arguments);
            }
            else {
                // TODO: cache this
                Method method = ReflectionUtils.getMethodRecursively(clazz, methodName, parameters);
                return ReflectionUtils.invokeMethod(target, method, arguments);
            }
        }
        catch (ReflectionException e) {
            // Prepend the user-supplied context message, if any, to the cause's message.
            throw new ReflectionException(getUserMessage() + " " + e.getMessage(), e);
        }
    }

    /**
     * Invokes specified method of specified target using reflection.
     *
     * <p>Method wraps all checked exceptions into unchecked exceptions.</p>
     *
     * @param method Method to invoke
     * @param target Object the target the underlying method is invoked from (may be null for static methods)
     * @param arguments vararg array of method arguments.
     * @return method invocation result
     */
    @SuppressWarnings("unchecked")
    public static <T> T invokeMethod(Object target, Method method, Object... arguments) {
        Argument.checkNotNull(method, "Argument 'method' must not be null!");
        Assert.checkTrue(Modifier.isStatic(method.getModifiers()) || target != null,
            "Argument 'target' must not be null for non-static method.");

        boolean accessible = method.isAccessible();
        try {
            method.setAccessible(true);
            return (T) method.invoke(target, arguments);
        }
        catch (IllegalAccessException e) {
            // BUG FIX: formerly used target.getClass(), which throws NPE when
            // a static method is invoked with a null target; the declaring
            // class is always available and at least as informative.
            throw new ReflectionException(getMethodInvocationErrorMessage(
                method.getDeclaringClass(), method.getName(), method.getParameterTypes(), arguments), e);
        }
        catch (InvocationTargetException e) {
            // Re-throw the method's own exception, unwrapped.
            throw ExceptionUtils.asUnchecked(e.getCause());
        }
        finally {
            // Restore the original accessibility flag.
            method.setAccessible(accessible);
        }
    }

    private static String getMethodInvocationErrorMessage(Class<?> clazz, String methodName, Class<?>[] parameterTypes,
        Object[] parameters) {
        return "Method invocation error " + ToStringHelper.toString(clazz, methodName, parameterTypes, parameters);
    }

    /**
     * Creates instance of specified class and wraps checked exceptions into unchecked ones.
     *
     * <p>Converts checked exceptions to unchecked</p>
     *
     * @param clazz Class a class to create instance
     * @param parameters Class[] array of constructor parameters (parameter types).
     * @param arguments Object[] an array of constructor arguments.
     * @return an instance of specified class
     */
    @SuppressWarnings("unchecked")
    public static <T> T newInstance(Class<T> clazz, Class[] parameters, Object[] arguments) {
        try {
            if (parameters.length != arguments.length) {
                Argument.fail(getInstanceCreationErrorMessage(clazz, parameters, arguments)
                    + " Expecting %d arguments but actually got %d.", parameters.length, arguments.length);
            }
            Constructor<T> constructor = clazz.getDeclaredConstructor(parameters);
            constructor.setAccessible(true);
            return constructor.newInstance(arguments);
        }
        catch (InstantiationException e) {
            throw new ReflectionException(getInstanceCreationErrorMessage(clazz, parameters, arguments), e);
        }
        catch (IllegalAccessException e) {
            throw new ReflectionException(getInstanceCreationErrorMessage(clazz, parameters, arguments), e);
        }
        catch (NoSuchMethodException e) {
            throw new ReflectionException(getInstanceCreationErrorMessage(clazz, parameters, arguments), e);
        }
        catch (InvocationTargetException e) {
            // Re-throw the constructor's own exception, unwrapped.
            throw ExceptionUtils.asUnchecked(e.getCause());
        }
    }

    private static String getInstanceCreationErrorMessage(Class<?> clazz, Class<?>[] parameterTypes,
        Object[] parameters) {
        return "Instance creation error " + ToStringHelper.toString(clazz, CONSTRUCTOR_NAME, parameterTypes, parameters);
    }

    /**
     * Returns detail message provided by user.
     *
     * @return String formatted user message string, empty string when no pattern was set.
     */
    private String getUserMessage() {
        return errorMessagePattern == null ? "" : String.format(errorMessagePattern, errorMessageParameters);
    }
}
/*
 * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.map.impl.nearcache.invalidation;

import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.internal.nearcache.impl.invalidation.Invalidator;
import com.hazelcast.internal.nearcache.impl.invalidation.MetaDataGenerator;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.map.impl.MapServiceContext;
import com.hazelcast.map.impl.nearcache.MapNearCacheManager;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.hazelcast.config.InMemoryFormat.BINARY;
import static com.hazelcast.internal.nearcache.NearCacheTestUtils.getBaseConfig;
import static com.hazelcast.map.impl.MapService.SERVICE_NAME;
import static com.hazelcast.util.MapUtil.createHashMap;
import static java.lang.String.format;
import static org.junit.Assert.assertEquals;

/**
 * Verifies that Near Cache invalidation metadata (per-partition invalidation
 * sequence numbers and partition UUIDs) survives partition migration as
 * cluster members join and leave: the values captured on the original owner
 * must be observable on the members that end up owning the partitions.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class MemberMapInvalidationMetaDataMigrationTest extends HazelcastTestSupport {

    // Number of entries written per test; each put produces one invalidation.
    private static final int MAP_SIZE = 10000;
    private static final String MAP_NAME = "MapInvalidationMetaDataMigrationTest";

    private TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory();
    private Config config;

    @Before
    public void setUp() {
        config = getConfig(MAP_NAME);
    }

    @After
    public void tearDown() {
        factory.shutdownAll();
    }

    /**
     * Writes on instance1, then rolls the cluster member-by-member
     * (start instance2, stop instance1, start instance3, stop instance2)
     * and expects the last surviving member to hold the original sequences.
     */
    @Test
    public void sequences_migrated_whenNewlyJoinedNodesShutdown() {
        HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        IMap<Object, Object> map = instance1.getMap(MAP_NAME);
        for (int i = 0; i < MAP_SIZE; i++) {
            map.put(i, i);
        }
        // Invalidations are published asynchronously; wait until all arrived.
        assertInvalidationCountEventually(MAP_NAME, MAP_SIZE, instance1);

        Map<Integer, Long> source = getPartitionToSequenceMap(MAP_NAME, instance1);

        HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance2);
        instance1.shutdown();

        HazelcastInstance instance3 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance3);
        instance2.shutdown();
        waitAllForSafeState(instance3);

        Map<Integer, Long> destination = getPartitionToSequenceMap(MAP_NAME, instance3);
        assertEqualsSequenceNumbers(source, destination);
    }

    /**
     * Writes on instance1 and shuts it down while instance2/instance3 stay up;
     * the sequences must be spread across the two survivors. Non-zero entries
     * from instance2 overwrite instance3's view before the comparison, because
     * each survivor only owns a subset of the partitions.
     */
    @Test
    public void sequences_migrated_whenSourceNodeShutdown() {
        HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        IMap<Object, Object> map = instance1.getMap(MAP_NAME);
        for (int i = 0; i < MAP_SIZE; i++) {
            map.put(i, i);
        }
        assertInvalidationCountEventually(MAP_NAME, MAP_SIZE, instance1);

        Map<Integer, Long> source = getPartitionToSequenceMap(MAP_NAME, instance1);

        HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        HazelcastInstance instance3 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance2, instance3);
        instance1.shutdown();

        Map<Integer, Long> destination2 = getPartitionToSequenceMap(MAP_NAME, instance2);
        Map<Integer, Long> destination3 = getPartitionToSequenceMap(MAP_NAME, instance3);
        // Merge: a non-zero sequence on instance2 means instance2 owns that
        // partition's metadata, so it wins over instance3's (zero) entry.
        for (Map.Entry<Integer, Long> entry : destination2.entrySet()) {
            Integer key = entry.getKey();
            Long value = entry.getValue();
            if (value != 0) {
                destination3.put(key, value);
            }
        }
        assertEqualsSequenceNumbers(source, destination3);
    }

    /**
     * Churn test: a background thread repeatedly starts and stops an extra
     * member while instance1 (the writer) stays up; after the churn stops and
     * instance2 shuts down, instance1 must still report the original sequences.
     */
    @Test
    public void sequences_migrated_whenOneNodeContinuouslyStartsAndStops() {
        HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        IMap<Object, Object> map = instance1.getMap(MAP_NAME);
        for (int i = 0; i < MAP_SIZE; i++) {
            map.put(i, i);
        }
        assertInvalidationCountEventually(MAP_NAME, MAP_SIZE, instance1);

        Map<Integer, Long> source = getPartitionToSequenceMap(MAP_NAME, instance1);

        HazelcastInstance instance2 = factory.newHazelcastInstance(config);

        final AtomicBoolean stop = new AtomicBoolean();
        // "Shadow" member: joins, stays 5s, leaves — repeatedly, forcing
        // continuous partition migrations until `stop` is set.
        Thread shadow = new Thread(new Runnable() {
            @Override
            public void run() {
                while (!stop.get()) {
                    HazelcastInstance instance = factory.newHazelcastInstance(config);
                    waitAllForSafeState(instance);
                    sleepSeconds(5);
                    instance.shutdown();
                }
            }
        });
        shadow.start();
        sleepSeconds(20);
        stop.set(true);
        assertJoinable(shadow);

        instance2.shutdown();

        Map<Integer, Long> destination = getPartitionToSequenceMap(MAP_NAME, instance1);
        assertEqualsSequenceNumbers(source, destination);
    }

    /**
     * Same rolling-restart scenario as the sequence variant, but asserts the
     * per-partition UUIDs survive the migrations.
     */
    @Test
    public void uuids_migrated_whenNewlyJoinedNodesShutdown() {
        HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        IMap<Object, Object> map = instance1.getMap(MAP_NAME);
        for (int i = 0; i < MAP_SIZE; i++) {
            map.put(i, i);
        }
        assertInvalidationCountEventually(MAP_NAME, MAP_SIZE, instance1);

        Map<Integer, UUID> source = getPartitionToUuidMap(instance1);

        HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance2);
        instance1.shutdown();

        HazelcastInstance instance3 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance3);
        instance2.shutdown();

        Map<Integer, UUID> destination = getPartitionToUuidMap(instance3);
        assertEqualsPartitionUUIDs(source, destination);
    }

    /**
     * Shuts down the writer while two survivors remain; merges each survivor's
     * view by partition ownership and compares against the original UUIDs.
     */
    @Test
    public void uuids_migrated_whenSourceNodeShutdown() {
        final HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        IMap<Object, Object> map = instance1.getMap(MAP_NAME);
        for (int i = 0; i < MAP_SIZE; i++) {
            map.put(i, i);
        }
        assertInvalidationCountEventually(MAP_NAME, MAP_SIZE, instance1);

        Map<Integer, UUID> source = getPartitionToUuidMap(instance1);

        HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        HazelcastInstance instance3 = factory.newHazelcastInstance(config);
        waitAllForSafeState(instance1, instance2, instance3);
        instance1.shutdown();

        Map<Integer, UUID> destination2 = getPartitionToUuidMap(instance2);
        Map<Integer, UUID> destination3 = getPartitionToUuidMap(instance3);
        InternalPartitionService partitionService2 = getNodeEngineImpl(instance2).getPartitionService();
        Map<Integer, UUID> merged = mergeOwnedPartitionUuids(partitionService2, destination2, destination3);
        assertEqualsPartitionUUIDs(source, merged);
    }

    /**
     * Polls until the sum of per-partition invalidation sequences on
     * {@code instance} reaches {@code expectedInvalidationCount}.
     */
    private void assertInvalidationCountEventually(final String mapName, final int expectedInvalidationCount,
                                                   final HazelcastInstance instance) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                long invalidationCount = calculateNumberOfInvalidationsSoFar(mapName, instance);
                assertEquals(expectedInvalidationCount, invalidationCount);
            }
        });
    }

    protected Config getConfig(String mapName) {
        MapConfig mapConfig = getMapConfig(mapName);
        return getBaseConfig()
                .addMapConfig(mapConfig);
    }

    // Zero backups: lost sequences cannot be recovered from a backup, so the
    // test exercises migration alone.
    protected MapConfig getMapConfig(String mapName) {
        NearCacheConfig nearCacheConfig = getNearCacheConfig(mapName);
        return new MapConfig(mapName)
                .setNearCacheConfig(nearCacheConfig)
                .setBackupCount(0)
                .setAsyncBackupCount(0);
    }

    protected NearCacheConfig getNearCacheConfig(String mapName) {
        return new NearCacheConfig(mapName)
                .setInMemoryFormat(BINARY)
                .setInvalidateOnChange(true)
                .setCacheLocalEntries(true);
    }

    /** Sums the current invalidation sequence of every partition for {@code mapName}. */
    private static long calculateNumberOfInvalidationsSoFar(String mapName, HazelcastInstance instance) {
        NodeEngineImpl nodeEngineImpl = getNodeEngineImpl(instance);
        int partitionCount = nodeEngineImpl.getPartitionService().getPartitionCount();
        MetaDataGenerator metaDataGenerator = getMetaDataGenerator(nodeEngineImpl);
        long invalidationCount = 0;
        for (int i = 0; i < partitionCount; i++) {
            invalidationCount += metaDataGenerator.currentSequence(mapName, i);
        }
        return invalidationCount;
    }

    /** Snapshot of partitionId -> current invalidation sequence for {@code mapName}. */
    private static Map<Integer, Long> getPartitionToSequenceMap(String mapName, HazelcastInstance instance) {
        NodeEngineImpl nodeEngineImpl = getNodeEngineImpl(instance);
        int partitionCount = nodeEngineImpl.getPartitionService().getPartitionCount();
        MetaDataGenerator metaDataGenerator = getMetaDataGenerator(nodeEngineImpl);
        Map<Integer, Long> partitionToSequenceMap = createHashMap(partitionCount);
        for (int i = 0; i < partitionCount; i++) {
            partitionToSequenceMap.put(i, metaDataGenerator.currentSequence(mapName, i));
        }
        return partitionToSequenceMap;
    }

    /** Snapshot of partitionId -> partition UUID (may contain null values). */
    private static Map<Integer, UUID> getPartitionToUuidMap(HazelcastInstance instance) {
        NodeEngineImpl nodeEngineImpl = getNodeEngineImpl(instance);
        int partitionCount = nodeEngineImpl.getPartitionService().getPartitionCount();
        MetaDataGenerator metaDataGenerator = getMetaDataGenerator(nodeEngineImpl);
        Map<Integer, UUID> partitionToUuidMap = createHashMap(partitionCount);
        for (int i = 0; i < partitionCount; i++) {
            partitionToUuidMap.put(i, metaDataGenerator.getUuidOrNull(i));
        }
        return partitionToUuidMap;
    }

    /** Navigates map service -> near cache manager -> invalidator to reach the generator. */
    private static MetaDataGenerator getMetaDataGenerator(NodeEngineImpl nodeEngineImpl) {
        MapService mapService = nodeEngineImpl.getService(SERVICE_NAME);
        MapServiceContext mapServiceContext = mapService.getMapServiceContext();
        MapNearCacheManager mapNearCacheManager = mapServiceContext.getMapNearCacheManager();
        Invalidator invalidator = mapNearCacheManager.getInvalidator();
        return invalidator.getMetaDataGenerator();
    }

    /**
     * For each partition, takes the UUID from {@code localUUIDs} when the
     * partition is owned locally (per {@code localPartitionService}),
     * otherwise from {@code remoteUUIDs}.
     */
    private static Map<Integer, UUID> mergeOwnedPartitionUuids(InternalPartitionService localPartitionService,
                                                               Map<Integer, UUID> localUUIDs, Map<Integer, UUID> remoteUUIDs) {
        int partitionCount = localPartitionService.getPartitionCount();
        Map<Integer, UUID> merged = createHashMap(partitionCount);
        for (int i = 0; i < partitionCount; i++) {
            if (localPartitionService.getPartition(i).isLocal()) {
                merged.put(i, localUUIDs.get(i));
            } else {
                merged.put(i, remoteUUIDs.get(i));
            }
        }
        return merged;
    }

    /** Asserts every source partition's sequence number appears unchanged in destination. */
    private static void assertEqualsSequenceNumbers(Map<Integer, Long> source, Map<Integer, Long> destination) {
        for (Map.Entry<Integer, Long> entry : source.entrySet()) {
            Integer key = entry.getKey();
            Long first = entry.getValue();
            Long last = destination.get(key);
            assertEquals(format(
                    "Expected source and destination sequence numbers to be the same (source: %s) (destination %s)",
                    source, destination), first, last);
        }
    }

    /** Asserts every source partition's UUID appears unchanged in destination. */
    private static void assertEqualsPartitionUUIDs(Map<Integer, UUID> source, Map<Integer, UUID> destination) {
        for (Map.Entry<Integer, UUID> entry : source.entrySet()) {
            Integer key = entry.getKey();
            UUID first = entry.getValue();
            UUID last = destination.get(key);
            assertEquals(format(
                    "Expected source and destination partition UUIDs to be the same (source: %s) (destination %s)",
                    source, destination), first, last);
        }
    }
}
/**
 * Copyright The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.alibaba.wasp.master;

import com.alibaba.wasp.FConstants;
import com.alibaba.wasp.Server;
import com.alibaba.wasp.ServerName;
import com.alibaba.wasp.WaspTestingUtility;
import com.alibaba.wasp.monitoring.MonitoredTask;
import com.alibaba.wasp.zookeeper.ClusterStatusTracker;
import com.alibaba.wasp.zookeeper.MasterAddressTracker;
import com.alibaba.wasp.zookeeper.ZKUtil;
import com.alibaba.wasp.zookeeper.ZooKeeperListener;
import com.alibaba.wasp.zookeeper.ZooKeeperWatcher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;

import java.io.IOException;
import java.util.concurrent.Semaphore;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests {@link ActiveMasterManager}: election of the active master via the
 * ZooKeeper master-address znode, fail-over to a waiting standby, and
 * behavior on master restart. Uses a mini ZK cluster and a {@link DummyMaster}
 * stand-in instead of a full master process.
 */
public class TestActiveMasterManager {
    private final static Log LOG = LogFactory
            .getLog(TestActiveMasterManager.class);
    private final static WaspTestingUtility TEST_UTIL = new WaspTestingUtility();

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        TEST_UTIL.getHBaseTestingUtility().startMiniZKCluster();
        // Point Wasp's ZK settings at the HBase mini-cluster's quorum/port.
        TEST_UTIL.getConfiguration().set(FConstants.ZOOKEEPER_QUORUM,
                TEST_UTIL.getConfiguration().get(HConstants.ZOOKEEPER_QUORUM));
        TEST_UTIL.getConfiguration().set(FConstants.ZOOKEEPER_CLIENT_PORT,
                TEST_UTIL.getConfiguration().get(HConstants.ZOOKEEPER_CLIENT_PORT));
    }

    @AfterClass
    public static void tearDownAfterClass() throws Exception {
        TEST_UTIL.getHBaseTestingUtility().shutdownMiniZKCluster();
    }

    /**
     * A master becomes active, then a second master object (same address,
     * simulating a restart) goes through the become-active flow again.
     */
    @Test
    public void testRestartMaster() throws IOException, KeeperException {
        ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
                "testActiveMasterManagerFromZK", null, true);
        try {
            ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
            ZKUtil.deleteNode(zk, zk.clusterStateZNode);
        } catch (KeeperException.NoNodeException nne) {
            // Nodes may not exist on a fresh ZK cluster — nothing to clean up.
        }

        // Create the master node with a dummy address
        ServerName master = new ServerName("localhost", 1, System.currentTimeMillis());
        // Should not have a master yet
        DummyMaster dummyMaster = new DummyMaster(zk, master);
        ClusterStatusTracker clusterStatusTracker = dummyMaster
                .getClusterStatusTracker();
        ActiveMasterManager activeMasterManager = dummyMaster
                .getActiveMasterManager();
        assertFalse(activeMasterManager.clusterHasActiveMaster.get());

        // First test becoming the active master uninterrupted
        MonitoredTask status = Mockito.mock(MonitoredTask.class);
        clusterStatusTracker.setClusterUp();

        activeMasterManager.blockUntilBecomingActiveMaster(status,
                clusterStatusTracker);
        assertTrue(activeMasterManager.clusterHasActiveMaster.get());
        assertMaster(zk, master);

        // Now pretend master restart
        DummyMaster secondDummyMaster = new DummyMaster(zk, master);
        ActiveMasterManager secondActiveMasterManager = secondDummyMaster
                .getActiveMasterManager();
        assertFalse(secondActiveMasterManager.clusterHasActiveMaster.get());
        // NOTE(review): this re-invokes the FIRST manager rather than
        // secondActiveMasterManager — looks unintended for a "restart"
        // scenario; confirm against the upstream HBase test.
        activeMasterManager.blockUntilBecomingActiveMaster(status,
                clusterStatusTracker);
        assertTrue(activeMasterManager.clusterHasActiveMaster.get());
        assertMaster(zk, master);
    }

    /**
     * Unit tests that uses ZooKeeper but does not use the master-side methods but
     * rather acts directly on ZK.
     * @throws Exception
     */
    @Test
    public void testActiveMasterManagerFromZK() throws Exception {
        ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
                "testActiveMasterManagerFromZK", null, true);
        try {
            ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
            ZKUtil.deleteNode(zk, zk.clusterStateZNode);
        } catch (KeeperException.NoNodeException nne) {
            // Nodes may not exist on a fresh ZK cluster — nothing to clean up.
        }

        // Create the master node with a dummy address
        ServerName firstMasterAddress = new ServerName("localhost", 1,
                System.currentTimeMillis());
        ServerName secondMasterAddress = new ServerName("localhost", 2,
                System.currentTimeMillis());

        // Should not have a master yet
        DummyMaster ms1 = new DummyMaster(zk, firstMasterAddress);
        ActiveMasterManager activeMasterManager = ms1.getActiveMasterManager();
        assertFalse(activeMasterManager.clusterHasActiveMaster.get());

        // First test becoming the active master uninterrupted
        ClusterStatusTracker clusterStatusTracker = ms1.getClusterStatusTracker();
        clusterStatusTracker.setClusterUp();
        activeMasterManager.blockUntilBecomingActiveMaster(
                Mockito.mock(MonitoredTask.class), clusterStatusTracker);
        assertTrue(activeMasterManager.clusterHasActiveMaster.get());
        assertMaster(zk, firstMasterAddress);

        // New manager will now try to become the active master in another thread
        WaitToBeMasterThread t = new WaitToBeMasterThread(zk, secondMasterAddress);
        t.start();
        // Wait for this guy to figure out there is another active master
        // Wait for 1 second at most
        int sleeps = 0;
        while (!t.manager.clusterHasActiveMaster.get() && sleeps < 100) {
            Thread.sleep(10);
            sleeps++;
        }

        // Both should see that there is an active master
        assertTrue(activeMasterManager.clusterHasActiveMaster.get());
        assertTrue(t.manager.clusterHasActiveMaster.get());
        // But secondary one should not be the active master
        assertFalse(t.isActiveMaster);

        // Close the first server and delete its master node
        ms1.stop("stopping first server");

        // Use a listener to capture when the node is actually deleted
        NodeDeletionListener listener = new NodeDeletionListener(zk,
                zk.getMasterAddressZNode());
        zk.registerListener(listener);

        LOG.info("Deleting master node");
        ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());

        // Wait for the node to be deleted
        LOG.info("Waiting for active master manager to be notified");
        listener.waitForDeletion();
        LOG.info("Master node deleted");

        // Now we expect the secondary manager to have and be the active master
        // Wait for 1 second at most
        sleeps = 0;
        while (!t.isActiveMaster && sleeps < 100) {
            Thread.sleep(10);
            sleeps++;
        }
        LOG.debug("Slept " + sleeps + " times");

        assertTrue(t.manager.clusterHasActiveMaster.get());
        assertTrue(t.isActiveMaster);

        LOG.info("Deleting master node");
        ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
    }

    /**
     * Assert there is an active master and that it has the specified address.
     * @param zk watcher connected to the mini ZK cluster
     * @param expectedAddress the address the master znode is expected to hold
     * @throws org.apache.zookeeper.KeeperException
     * @throws java.io.IOException
     */
    private void assertMaster(ZooKeeperWatcher zk, ServerName expectedAddress)
            throws KeeperException, IOException {
        ServerName readAddress = MasterAddressTracker.getMasterAddress(zk);
        assertNotNull(readAddress);
        assertTrue(expectedAddress.equals(readAddress));
    }

    /**
     * Helper thread that blocks trying to become the active master and flips
     * {@link #isActiveMaster} once it succeeds.
     */
    public static class WaitToBeMasterThread extends Thread {

        ActiveMasterManager manager;
        DummyMaster dummyMaster;
        boolean isActiveMaster;

        public WaitToBeMasterThread(ZooKeeperWatcher zk, ServerName address) {
            this.dummyMaster = new DummyMaster(zk, address);
            this.manager = this.dummyMaster.getActiveMasterManager();
            isActiveMaster = false;
        }

        @Override
        public void run() {
            manager.blockUntilBecomingActiveMaster(Mockito.mock(MonitoredTask.class),
                    this.dummyMaster.getClusterStatusTracker());
            LOG.info("Second master has become the active master!");
            isActiveMaster = true;
        }
    }

    /**
     * ZK listener that releases a semaphore when a specific znode is deleted,
     * letting the test block until the deletion is observed.
     */
    public static class NodeDeletionListener extends ZooKeeperListener {
        private static final Log LOG = LogFactory
                .getLog(NodeDeletionListener.class);

        private Semaphore lock;
        private String node;

        public NodeDeletionListener(ZooKeeperWatcher watcher, String node) {
            super(watcher);
            lock = new Semaphore(0);
            this.node = node;
        }

        @Override
        public void nodeDeleted(String path) {
            if (path.equals(node)) {
                LOG.debug("nodeDeleted(" + path + ")");
                lock.release();
            }
        }

        // Blocks until nodeDeleted() fires for the watched znode.
        public void waitForDeletion() throws InterruptedException {
            lock.acquire();
        }
    }

    /**
     * Dummy Master Implementation.
     */
    public static class DummyMaster implements Server {
        private volatile boolean stopped;
        private ClusterStatusTracker clusterStatusTracker;
        private ActiveMasterManager activeMasterManager;

        public DummyMaster(ZooKeeperWatcher zk, ServerName master) {
            this.clusterStatusTracker = new ClusterStatusTracker(zk, this);
            clusterStatusTracker.start();

            this.activeMasterManager = new ActiveMasterManager(zk, master, this);
            zk.registerListener(activeMasterManager);
        }

        @Override
        public void abort(final String msg, final Throwable t) {
            // No-op: the dummy master never aborts.
        }

        @Override
        public boolean isAborted() {
            return false;
        }

        @Override
        public Configuration getConfiguration() {
            return null;
        }

        @Override
        public ZooKeeperWatcher getZooKeeper() {
            return null;
        }

        @Override
        public ServerName getServerName() {
            return null;
        }

        @Override
        public boolean isStopped() {
            return this.stopped;
        }

        @Override
        public void stop(String why) {
            this.stopped = true;
        }

        public ClusterStatusTracker getClusterStatusTracker() {
            return clusterStatusTracker;
        }

        public ActiveMasterManager getActiveMasterManager() {
            return activeMasterManager;
        }
    }
}
/*
 * This file is part of SpongeAPI, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.spongepowered.api.data;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.spongepowered.api.data.DataQuery.of;

import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.spongepowered.api.service.persistence.DataBuilder;
import org.spongepowered.api.service.persistence.SerializationService;
import org.spongepowered.api.util.Coerce;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;

/**
 * Default implementation of a {@link DataView} being used in memory.
 *
 * <p>Values are stored in an insertion-ordered map keyed by the last path
 * segment; nested {@link DataView}s are themselves {@code MemoryDataView}
 * instances, so multi-segment {@link DataQuery} paths are resolved by
 * recursing one segment at a time.</p>
 */
public class MemoryDataView implements DataView {

    // Backing storage: key is a single path segment; values are either plain
    // objects, immutable copies of collections/maps, or nested DataViews.
    protected final Map<String, Object> map = Maps.newLinkedHashMap();
    private final DataContainer container;
    private final DataView parent;
    private final DataQuery path;

    // Root constructor: only legal when `this` is itself the DataContainer.
    protected MemoryDataView() {
        checkState(this instanceof DataContainer, "Cannot construct a root MemoryDataView without a container!");
        this.path = of();
        this.parent = this;
        this.container = (DataContainer) this;
    }

    // Child constructor: absolute path = parent's path + the given segment(s).
    protected MemoryDataView(DataView parent, DataQuery path) {
        checkArgument(path.getParts().size() >= 1, "Path must have at least one part");
        this.parent = parent;
        this.container = parent.getContainer();
        this.path = parent.getCurrentPath().then(path);
    }

    @Override
    public DataContainer getContainer() {
        return this.container;
    }

    @Override
    public DataQuery getCurrentPath() {
        return this.path;
    }

    /** Returns the last segment of this view's path ("" for the root). */
    @Override
    public String getName() {
        List<String> parts = this.path.getParts();
        return parts.isEmpty() ? "" : parts.get(parts.size() - 1);
    }

    @Override
    public Optional<DataView> getParent() {
        return Optional.fromNullable(this.parent);
    }

    /**
     * Returns this view's keys; when {@code deep}, also includes every key of
     * nested views, prefixed with the nested view's segment.
     */
    @Override
    public Set<DataQuery> getKeys(boolean deep) {
        ImmutableSet.Builder<DataQuery> builder = ImmutableSet.builder();

        for (Map.Entry<String, Object> entry : this.map.entrySet()) {
            builder.add(of(entry.getKey()));
        }
        if (deep) {
            for (Map.Entry<String, Object> entry : this.map.entrySet()) {
                if (entry.getValue() instanceof DataView) {
                    for (DataQuery query : ((DataView) entry.getValue()).getKeys(true)) {
                        builder.add(of(entry.getKey()).then(query));
                    }
                }
            }
        }
        return builder.build();
    }

    /**
     * Returns key -> value for every key from {@link #getKeys(boolean)};
     * nested views are represented by their own values-map.
     */
    @Override
    public Map<DataQuery, Object> getValues(boolean deep) {
        ImmutableMap.Builder<DataQuery, Object> builder = ImmutableMap.builder();
        for (DataQuery query : getKeys(deep)) {
            Object value = get(query).get();
            if (value instanceof DataView) {
                builder.put(query, ((DataView) value).getValues(deep));
            } else {
                builder.put(query, get(query).get());
            }
        }
        return builder.build();
    }

    /** True when the (possibly nested) path resolves to a stored value. */
    @Override
    public boolean contains(DataQuery path) {
        checkNotNull(path, "path");
        List<DataQuery> queryParts = path.getQueryParts();
        if (queryParts.size() == 1) {
            String key = queryParts.get(0).getParts().get(0);
            return this.map.containsKey(key);
        } else {
            // Recurse: delegate the remaining segments to the first sub-view.
            DataQuery subQuery = queryParts.get(0);
            Optional<DataView> subViewOptional = this.getUnsafeView(subQuery);
            if (!subViewOptional.isPresent()) {
                return false;
            }
            List<String> subParts = Lists.newArrayListWithCapacity(queryParts.size() - 1);
            for (int i = 1; i < queryParts.size(); i++) {
                subParts.add(queryParts.get(i).asString("."));
            }
            return subViewOptional.get().contains(of(subParts));
        }
    }

    /**
     * Resolves a path to its raw stored value; an empty path yields this view
     * itself, a multi-segment path recurses through nested views.
     */
    @Override
    public Optional<Object> get(DataQuery path) {
        checkNotNull(path, "path");
        List<DataQuery> queryParts = path.getQueryParts();

        int sz = queryParts.size();

        if (sz == 0) {
            return Optional.<Object>of(this);
        }

        if (sz == 1) {
            String key = queryParts.get(0).getParts().get(0);
            if (this.map.containsKey(key)) {
                return Optional.of(this.map.get(key));
            } else {
                return Optional.absent();
            }
        }
        DataQuery subQuery = queryParts.get(0);
        Optional<DataView> subViewOptional = this.getUnsafeView(subQuery);
        DataView subView;
        if (!subViewOptional.isPresent()) {
            return Optional.absent();
        } else {
            subView = subViewOptional.get();
        }
        List<String> subParts = Lists.newArrayListWithCapacity(queryParts.size() - 1);
        for (int i = 1; i < queryParts.size(); i++) {
            subParts.add(queryParts.get(i).asString("."));
        }
        return subView.get(of(subParts));
    }

    /**
     * Stores a value at a path. DataViews/DataSerializables are copied
     * key-by-key; collections, arrays and maps are stored as immutable
     * snapshots; intermediate views are created as needed.
     */
    @Override
    public DataView set(DataQuery path, Object value) {
        checkNotNull(path, "path");
        checkNotNull(value, "value");
        checkState(this.container != null);

        if (value instanceof DataView) {
            checkArgument(value != this, "Cannot set a DataView to itself.");
            copyDataView(path, (DataView) value);
        } else if (value instanceof DataSerializable) {
            DataContainer valueContainer = ((DataSerializable) value).toContainer();
            checkArgument(!(valueContainer).equals(this), "Cannot insert self-referencing DataSerializable");
            copyDataView(path, valueContainer);
        } else {
            List<String> parts = path.getParts();
            if (parts.size() > 1) {
                // Multi-segment path: descend (creating the sub-view if
                // needed) and set the remainder there.
                String subKey = parts.get(0);
                DataQuery subQuery = of(subKey);
                Optional<DataView> subViewOptional = this.getUnsafeView(subQuery);
                DataView subView;
                if (!subViewOptional.isPresent()) {
                    this.createView(subQuery);
                    subView = (DataView) this.map.get(subKey);
                } else {
                    subView = subViewOptional.get();
                }
                List<String> subParts = Lists.newArrayListWithCapacity(parts.size() - 1);
                for (int i = 1; i < parts.size(); i++) {
                    subParts.add(parts.get(i));
                }
                subView.set(of(subParts), value);
            } else {
                if (value instanceof Collection) {
                    setCollection(parts.get(0), (Collection) value);
                } else if (value instanceof Object[]) {
                    setCollection(parts.get(0), Lists.newArrayList((Object[]) value));
                } else {
                    if (value instanceof Map) {
                        setMap(parts.get(0), (Map) value);
                    } else {
                        this.map.put(parts.get(0), value);
                    }
                }
            }
        }
        return this;
    }

    // Snapshots a collection: serializables become containers, views are
    // deep-copied, nested maps/collections become immutable copies.
    private void setCollection(String key, Collection<?> value) {
        ImmutableList.Builder<Object> builder = ImmutableList.builder();

        for (Object object : value) {
            if (object instanceof DataSerializable) {
                builder.add(((DataSerializable) object).toContainer());
            } else if (object instanceof DataView) {
                MemoryDataView view = new MemoryDataContainer();
                DataView internalView = (DataView) object;
                for (Map.Entry<DataQuery, Object> entry : internalView.getValues(false).entrySet()) {
                    view.set(entry.getKey(), entry.getValue());
                }
                builder.add(view);
            } else if (object instanceof Map) {
                builder.add(ImmutableMap.copyOf((Map<?, ?>) object));
            } else if (object instanceof Collection) {
                builder.add(ImmutableList.copyOf((Collection<?>) object));
            } else {
                builder.add(object);
            }
        }
        this.map.put(key, builder.build());
    }

    // Stores a map as a nested view, one entry per key (keys stringified).
    private void setMap(String key, Map<?, ?> value) {
        DataView view = createView(of(key));
        for (Map.Entry<?, ?> entry : value.entrySet()) {
            view.set(of(entry.getKey().toString()), entry.getValue());
        }
    }

    // Copies every (deep) key of `value` under `path` on this view.
    private void copyDataView(DataQuery path, DataView value) {
        Collection<DataQuery> valueKeys = value.getKeys(true);
        for (DataQuery oldKey : valueKeys) {
            set(path.then(oldKey), value.get(oldKey).get());
        }
    }

    /** Removes the value at a path; a no-op when an intermediate view is missing. */
    @Override
    public DataView remove(DataQuery path) {
        checkNotNull(path, "path");
        List<String> parts = path.getParts();
        if (parts.size() > 1) {
            String subKey = parts.get(0);
            DataQuery subQuery = of(subKey);
            Optional<DataView> subViewOptional = this.getUnsafeView(subQuery);
            DataView subView;
            if (!subViewOptional.isPresent()) {
                return this;
            } else {
                subView = subViewOptional.get();
            }
            List<String> subParts = Lists.newArrayListWithCapacity(parts.size() - 1);
            for (int i = 1; i < parts.size(); i++) {
                subParts.add(parts.get(i));
            }
            subView.remove(of(subParts));
        } else {
            this.map.remove(parts.get(0));
        }
        return this;
    }

    /**
     * Creates (and stores) an empty view at the given path, creating any
     * missing intermediate views along the way.
     */
    @Override
    public DataView createView(DataQuery path) {
        checkNotNull(path, "path");
        List<DataQuery> queryParts = path.getQueryParts();

        int sz = queryParts.size();

        checkArgument(sz != 0, "The size of the query must be at least 1");

        if (sz == 1) {
            DataQuery key = queryParts.get(0);
            DataView result = new MemoryDataView(this, key);
            this.map.put(key.getParts().get(0), result);
            return result;
        } else {
            List<String> subParts = Lists
                    .newArrayListWithCapacity(queryParts.size() - 1);
            for (int i = 1; i < sz; i++) {
                subParts.add(queryParts.get(i).asString('.'));
            }
            DataQuery subQuery = of(subParts);
            DataView subView = (DataView) this.map.get(queryParts.get(0).asString('.'));
            if (subView == null) {
                // NOTE(review): the intermediate view is constructed with
                // `this.parent` as its parent instead of `this`, which would
                // give it a wrong absolute path — looks like a bug; confirm
                // against the DataView contract before changing.
                subView = new MemoryDataView(this.parent, queryParts.get(0));
                this.map.put(queryParts.get(0).asString('.'), subView);
            }
            return subView.createView(subQuery);
        }
    }

    /** Creates a view at `path` and populates it from `map` (nested maps recurse). */
    @Override
    public DataView createView(DataQuery path, Map<?, ?> map) {
        checkNotNull(path, "path");
        DataView section = createView(path);

        for (Map.Entry<?, ?> entry : map.entrySet()) {
            if (entry.getValue() instanceof Map) {
                section.createView(of('.', entry.getKey().toString()), (Map<?, ?>) entry.getValue());
            } else {
                section.set(of('.', entry.getKey().toString()), entry.getValue());
            }
        }
        return section;
    }

    @Override
    public Optional<DataView> getView(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            if (val.get() instanceof DataView) {
                return Optional.of((DataView) val.get());
            }
        }
        return Optional.absent();
    }

    /**
     * Returns the value at `path` as a map: stored DataViews are flattened to
     * an immutable string-keyed map, raw maps are returned as-is.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Optional<? extends Map<?, ?>> getMap(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            if (val.get() instanceof DataView) {
                ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
                for (Map.Entry<DataQuery, Object> entry : ((DataView) val.get()).getValues(false).entrySet()) {
                    if (entry.getValue() instanceof Collection) {
                        builder.put(entry.getKey().asString('.'), ImmutableList.copyOf((Collection) entry.getValue()));
                    } else if (entry.getValue() instanceof Map) {
                        builder.put(entry.getKey().asString('.'), ImmutableMap.copyOf((Map) entry.getValue()));
                    } else {
                        builder.put(entry.getKey().asString('.'), entry.getValue());
                    }
                }
                return Optional.of(builder.build());
            } else if (val.get() instanceof Map) {
                return Optional.of((Map<?, ?>) val.get());
            }
        }
        return Optional.absent();
    }

    // Like getView, but used internally where the caller tolerates absence.
    private Optional<DataView> getUnsafeView(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            if (val.get() instanceof DataView) {
                return Optional.of((DataView) val.get());
            }
        }
        return Optional.absent();
    }

    @Override
    public Optional<Boolean> getBoolean(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            return Coerce.asBoolean(val.get());
        }
        return Optional.absent();
    }

    @Override
    public Optional<Integer> getInt(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            return Coerce.asInteger(val.get());
        }
        return Optional.absent();
    }

    @Override
    public Optional<Long> getLong(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            return Coerce.asLong(val.get());
        }
        return Optional.absent();
    }

    @Override
    public Optional<Double> getDouble(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            return Coerce.asDouble(val.get());
        }
        return Optional.absent();
    }

    @Override
    public Optional<String> getString(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            return Coerce.asString(val.get());
        }
        return Optional.absent();
    }

    /** Returns a MUTABLE copy of the stored list/array, if one is present. */
    @Override
    public Optional<List<?>> getList(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            if (val.get() instanceof List<?>) {
                return Optional.<List<?>>of(Lists.newArrayList((List<?>) val.get()));
            }
            if (val.get() instanceof Object[]) {
                return Optional.<List<?>>of(Lists.newArrayList((Object[]) val.get()));
            }
        }
        return Optional.absent();
    }

    // Each typed list getter below coerces element-by-element and silently
    // drops elements that cannot be coerced.
    @Override
    public Optional<List<String>> getStringList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<String> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<String> optional = Coerce.asString(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    // Returns the stored list/array without copying (callers must not mutate).
    private Optional<List<?>> getUnsafeList(DataQuery path) {
        Optional<Object> val = get(path);
        if (val.isPresent()) {
            if (val.get() instanceof List<?>) {
                return Optional.<List<?>>of((List<?>) val.get());
            } else if (val.get() instanceof Object[]) {
                return Optional.<List<?>>of(Arrays.asList(((Object[]) val.get())));
            }
        }
        return Optional.absent();
    }

    @Override
    public Optional<List<Character>> getCharacterList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Character> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Character> optional = Coerce.asChar(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Boolean>> getBooleanList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Boolean> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Boolean> optional = Coerce.asBoolean(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Byte>> getByteList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Byte> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Byte> optional = Coerce.asByte(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Short>> getShortList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Short> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Short> optional = Coerce.asShort(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Integer>> getIntegerList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Integer> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Integer> optional = Coerce.asInteger(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Long>> getLongList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Long> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Long> optional = Coerce.asLong(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Float>> getFloatList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Float> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Float> optional = Coerce.asFloat(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Double>> getDoubleList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Double> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            Optional<Double> optional = Coerce.asDouble(object);
            if (optional.isPresent()) {
                newList.add(optional.get());
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<Map<?, ?>>> getMapList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<Map<?, ?>> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            if (object instanceof Map) {
                newList.add((Map<?, ?>) object);
            }
        }
        return Optional.of(newList);
    }

    @Override
    public Optional<List<DataView>> getViewList(DataQuery path) {
        Optional<List<?>> list = getUnsafeList(path);
        if (!list.isPresent()) {
            return Optional.absent();
        }
        List<DataView> newList = Lists.newArrayList();
        for (Object object : list.get()) {
            if (object instanceof DataView) {
                newList.add((DataView) object);
            }
        }
        return Optional.of(newList);
    }

    /**
     * Deserializes the view stored at `path` into a {@code T} using the
     * {@link DataBuilder} registered for {@code clazz} on {@code service};
     * absent when there is no view or no registered builder.
     */
    @Override
    public <T extends DataSerializable> Optional<T> getSerializable(DataQuery path, Class<T> clazz,
            SerializationService service) {
        checkNotNull(path, "path");
        checkNotNull(clazz, "clazz");
        checkNotNull(service, "service");
        Optional<DataView> optional = getUnsafeView(path);

        if (!optional.isPresent()) {
            return Optional.absent();
        }

        Optional<DataBuilder<T>> builderOptional = service.getBuilder(clazz);
        if (!builderOptional.isPresent()) {
            return Optional.absent();
        } else {
            return builderOptional.get().build(optional.get());
        }
    }

    /**
     * Deserializes every view in the list at `path`; views that fail to build
     * are silently skipped.
     */
    @Override
    public <T extends DataSerializable> Optional<List<T>> getSerializableList(DataQuery path, Class<T> clazz,
            SerializationService service) {
        checkNotNull(path, "path");
        checkNotNull(clazz, "clazz");
        checkNotNull(service, "service");
        Optional<List<DataView>> optional = getViewList(path);

        if (!optional.isPresent()) {
            return Optional.absent();
        }

        Optional<DataBuilder<T>> builderOptional = service.getBuilder(clazz);
        if (!builderOptional.isPresent()) {
            return Optional.absent();
        } else {
            List<T> newList = Lists.newArrayList();
            for (DataView view : optional.get()) {
                Optional<T> element = builderOptional.get().build(view);
                if (element.isPresent()) {
                    newList.add(element.get());
                }
            }
            return Optional.of(newList);
        }
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(this.map, this.path);
    }

    // Equality is by contents and path; the parent/container are ignored.
    @Override
    public boolean equals(@Nullable Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final MemoryDataView other = (MemoryDataView) obj;

        return Objects.equal(this.map.entrySet(), other.map.entrySet())
               && Objects.equal(this.path, other.path);
    }
}
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.device.mgt.mobile.impl.android; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.device.mgt.common.DeviceManagementException; import org.wso2.carbon.device.mgt.common.Feature; import org.wso2.carbon.device.mgt.common.FeatureManager; import org.wso2.carbon.device.mgt.mobile.dao.AbstractMobileDeviceManagementDAOFactory; import org.wso2.carbon.device.mgt.mobile.dao.MobileDeviceManagementDAOException; import org.wso2.carbon.device.mgt.mobile.dao.MobileDeviceManagementDAOFactory; import org.wso2.carbon.device.mgt.mobile.dao.MobileFeatureDAO; import org.wso2.carbon.device.mgt.mobile.dto.MobileFeature; import org.wso2.carbon.device.mgt.mobile.impl.android.dao.AndroidDAOFactory; import org.wso2.carbon.device.mgt.mobile.util.MobileDeviceManagementUtil; import java.util.ArrayList; import java.util.List; public class AndroidFeatureManager implements FeatureManager { private MobileFeatureDAO featureDAO; private static final Log log = LogFactory.getLog(AndroidFeatureManager.class); public AndroidFeatureManager() { MobileDeviceManagementDAOFactory daoFactory = new AndroidDAOFactory(); this.featureDAO = daoFactory.getMobileFeatureDAO(); } @Override public boolean addFeature(Feature feature) throws DeviceManagementException { try { 
AndroidDAOFactory.beginTransaction(); MobileFeature mobileFeature = MobileDeviceManagementUtil.convertToMobileFeature(feature); featureDAO.addFeature(mobileFeature); AndroidDAOFactory.commitTransaction(); return true; } catch (MobileDeviceManagementDAOException e) { try { AndroidDAOFactory.rollbackTransaction(); } catch (MobileDeviceManagementDAOException e1) { log.warn("Error occurred while roll-backing the transaction", e); } throw new DeviceManagementException("Error occurred while adding the feature", e); } } @Override public boolean addFeatures(List<Feature> features) throws DeviceManagementException { List<MobileFeature> mobileFeatures = new ArrayList<MobileFeature>(features.size()); for (Feature feature : features) { mobileFeatures.add(MobileDeviceManagementUtil.convertToMobileFeature(feature)); } try { AndroidDAOFactory.beginTransaction(); featureDAO.addFeatures(mobileFeatures); AndroidDAOFactory.commitTransaction(); return true; } catch (MobileDeviceManagementDAOException e) { try { AndroidDAOFactory.rollbackTransaction(); } catch (MobileDeviceManagementDAOException e1) { log.warn("Error occurred while roll-backing the transaction", e); } throw new DeviceManagementException("Error occurred while adding the features", e); } } @Override public Feature getFeature(String name) throws DeviceManagementException { try { MobileFeature mobileFeature = featureDAO.getFeatureByCode(name); Feature feature = MobileDeviceManagementUtil.convertToFeature(mobileFeature); return feature; } catch (MobileDeviceManagementDAOException e) { throw new DeviceManagementException("Error occurred while retrieving the feature", e); } } @Override public List<Feature> getFeatures() throws DeviceManagementException { try { List<MobileFeature> mobileFeatures = featureDAO.getAllFeatures(); List<Feature> featureList = new ArrayList<Feature>(mobileFeatures.size()); for (MobileFeature mobileFeature : mobileFeatures) { featureList.add(MobileDeviceManagementUtil.convertToFeature(mobileFeature)); 
} return featureList; } catch (MobileDeviceManagementDAOException e) { throw new DeviceManagementException("Error occurred while retrieving the list of features registered for " + "Android platform", e); } } @Override public boolean removeFeature(String code) throws DeviceManagementException { boolean status; try { AndroidDAOFactory.beginTransaction(); featureDAO.deleteFeatureByCode(code); AndroidDAOFactory.commitTransaction(); status = true; } catch (MobileDeviceManagementDAOException e) { try { AndroidDAOFactory.rollbackTransaction(); } catch (MobileDeviceManagementDAOException e1) { log.warn("Error occurred while roll-backing the transaction", e); } throw new DeviceManagementException("Error occurred while removing the feature", e); } return status; } @Override public boolean addSupportedFeaturesToDB() throws DeviceManagementException { synchronized (this) { List<Feature> supportedFeatures = getSupportedFeatures(); List<Feature> existingFeatures = this.getFeatures(); List<Feature> missingFeatures = MobileDeviceManagementUtil. getMissingFeatures(supportedFeatures, existingFeatures); if (missingFeatures.size() > 0) { return this.addFeatures(missingFeatures); } return true; } } //Get the supported feature list. 
private static List<Feature> getSupportedFeatures() { List<Feature> supportedFeatures = new ArrayList<Feature>(); Feature feature = new Feature(); feature.setCode("DEVICE_LOCK"); feature.setName("Device Lock"); feature.setDescription("Lock the device"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_UNLOCK"); feature.setName("Device Unlock"); feature.setDescription("Unlock the device"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_LOCATION"); feature.setName("Location"); feature.setDescription("Request coordinates of device location"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("WIFI"); feature.setName("wifi"); feature.setDescription("Setting up wifi configuration"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("CAMERA"); feature.setName("camera"); feature.setDescription("Enable or disable camera"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("EMAIL"); feature.setName("Email"); feature.setDescription("Configure email settings"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_MUTE"); feature.setName("Mute"); feature.setDescription("Enable mute in the device"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_INFO"); feature.setName("Device info"); feature.setDescription("Request device information"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("ENTERPRISE_WIPE"); feature.setName("Enterprise Wipe"); feature.setDescription("Remove enterprise applications"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("CLEAR_PASSWORD"); feature.setName("Clear Password"); feature.setDescription("Clear current password"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("WIPE_DATA"); feature.setName("Wipe Data"); feature.setDescription("Factory reset the device"); 
supportedFeatures.add(feature); feature = new Feature(); feature.setCode("APPLICATION_LIST"); feature.setName("Application List"); feature.setDescription("Request list of current installed applications"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("CHANGE_LOCK_CODE"); feature.setName("Change Lock-code"); feature.setDescription("Change current lock code"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("INSTALL_APPLICATION"); feature.setName("Install App"); feature.setDescription("Install Enterprise or Market application"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("UNINSTALL_APPLICATION"); feature.setName("Uninstall App"); feature.setDescription("Uninstall application"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("BLACKLIST_APPLICATIONS"); feature.setName("Blacklist app"); feature.setDescription("Blacklist applications"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("ENCRYPT_STORAGE"); feature.setName("Encrypt storage"); feature.setDescription("Encrypt storage"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_RING"); feature.setName("Ring"); feature.setDescription("Ring the device"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("PASSCODE_POLICY"); feature.setName("Password Policy"); feature.setDescription("Set passcode policy"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("NOTIFICATION"); feature.setName("Message"); feature.setDescription("Send message"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("DEVICE_REBOOT"); feature.setName("Reboot"); feature.setDescription("Reboot the device"); supportedFeatures.add(feature); feature = new Feature(); feature.setCode("UPGRADE_FIRMWARE"); feature.setName("Upgrade Firmware"); feature.setDescription("Upgrade Firmware"); supportedFeatures.add(feature); feature = new 
Feature(); feature.setCode("VPN"); feature.setName("Configure VPN"); feature.setDescription("Configure VPN settings"); supportedFeatures.add(feature); return supportedFeatures; } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// NOTE(review): LoxiGen-generated OpenFlow 1.3 wire codec for the BSN (Big Switch)
// experimenter stats reply "flow checksum bucket" (experimenter 0x5c16c7, subtype 0xa).
// Immutable message value class with builders, a wire reader, a wire writer, and a
// Guava Funnel for hashing. Comments below were added for review; the code itself is
// byte-identical to the generated output and must not be hand-edited (regenerate instead).
class OFBsnFlowChecksumBucketStatsReplyVer13 implements OFBsnFlowChecksumBucketStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnFlowChecksumBucketStatsReplyVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    // Minimum serialized size: 16-byte stats-reply header + 4-byte experimenter + 4-byte subtype.
    final static int MINIMUM_LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static List<OFBsnFlowChecksumBucketStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFBsnFlowChecksumBucketStatsEntry>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFBsnFlowChecksumBucketStatsEntry> entries;
//
    // Immutable default instance
    final static OFBsnFlowChecksumBucketStatsReplyVer13 DEFAULT = new OFBsnFlowChecksumBucketStatsReplyVer13(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnFlowChecksumBucketStatsReplyVer13(long xid, Set<OFStatsReplyFlags> flags, List<OFBsnFlowChecksumBucketStatsEntry> entries) {
        if(flags == null) {
            throw new NullPointerException("OFBsnFlowChecksumBucketStatsReplyVer13: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFBsnFlowChecksumBucketStatsReplyVer13: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    // BSN experimenter id (fixed for this message type).
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    // BSN experimenter message subtype (fixed for this message type).
    @Override
    public long getSubtype() {
        return 0xaL;
    }

    @Override
    public List<OFBsnFlowChecksumBucketStatsEntry> getEntries() {
        return entries;
    }

    public OFBsnFlowChecksumBucketStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that inherits any unset property from an existing parent message.
    static class BuilderWithParent implements OFBsnFlowChecksumBucketStatsReply.Builder {
        final OFBsnFlowChecksumBucketStatsReplyVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnFlowChecksumBucketStatsEntry> entries;

        BuilderWithParent(OFBsnFlowChecksumBucketStatsReplyVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0xaL;
        }

        @Override
        public List<OFBsnFlowChecksumBucketStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setEntries(List<OFBsnFlowChecksumBucketStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        // Each property falls back to the parent message's value when not explicitly set.
        @Override
        public OFBsnFlowChecksumBucketStatsReply build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
                if(flags == null)
                    throw new NullPointerException("Property flags must not be null");
                List<OFBsnFlowChecksumBucketStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
                if(entries == null)
                    throw new NullPointerException("Property entries must not be null");
                //
                return new OFBsnFlowChecksumBucketStatsReplyVer13(
                    xid,
                    flags,
                    entries
                );
        }
    }

    // Builder that starts from the class-level defaults instead of a parent message.
    static class Builder implements OFBsnFlowChecksumBucketStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnFlowChecksumBucketStatsEntry> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0xaL;
        }

        @Override
        public List<OFBsnFlowChecksumBucketStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnFlowChecksumBucketStatsReply.Builder setEntries(List<OFBsnFlowChecksumBucketStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }
//
        // Unset properties fall back to DEFAULT_XID / DEFAULT_FLAGS / DEFAULT_ENTRIES.
        @Override
        public OFBsnFlowChecksumBucketStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnFlowChecksumBucketStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFBsnFlowChecksumBucketStatsReplyVer13(
                    xid,
                    flags,
                    entries
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the message from the wire; validates every fixed-value field and
    // returns null (after resetting the reader index) when the buffer is incomplete.
    static class Reader implements OFMessageReader<OFBsnFlowChecksumBucketStatsReply> {
        @Override
        public OFBsnFlowChecksumBucketStatsReply readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer13.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0xaL
            int subtype = bb.readInt();
            if(subtype != 0xa)
                throw new OFParseError("Wrong subtype: Expected=0xaL(0xaL), got="+subtype);
            // Remaining bytes of the declared message length are the entry list.
            List<OFBsnFlowChecksumBucketStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnFlowChecksumBucketStatsEntryVer13.READER);

            OFBsnFlowChecksumBucketStatsReplyVer13 bsnFlowChecksumBucketStatsReplyVer13 = new OFBsnFlowChecksumBucketStatsReplyVer13(
                    xid,
                      flags,
                      entries
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnFlowChecksumBucketStatsReplyVer13);
            return bsnFlowChecksumBucketStatsReplyVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnFlowChecksumBucketStatsReplyVer13Funnel FUNNEL = new OFBsnFlowChecksumBucketStatsReplyVer13Funnel();

    // Feeds the message's wire-relevant fields into a Guava PrimitiveSink for hashing;
    // mirrors the writer's field order but skips length and padding.
    static class OFBsnFlowChecksumBucketStatsReplyVer13Funnel implements Funnel<OFBsnFlowChecksumBucketStatsReplyVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnFlowChecksumBucketStatsReplyVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer13.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0xaL
            sink.putInt(0xa);
            FunnelUtils.putList(message.entries, sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes the message; writes a placeholder length, then back-patches the real
    // length once the variable-size entry list has been written.
    static class Writer implements OFMessageWriter<OFBsnFlowChecksumBucketStatsReplyVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnFlowChecksumBucketStatsReplyVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer13.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0xaL
            bb.writeInt(0xa);
            ChannelUtils.writeList(bb, message.entries);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);

        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnFlowChecksumBucketStatsReplyVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("entries=").append(entries);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnFlowChecksumBucketStatsReplyVer13 other = (OFBsnFlowChecksumBucketStatsReplyVer13) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (entries == null) {
            if (other.entries != null)
                return false;
        } else if (!entries.equals(other.entries))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the first term drops "result +" (generated as
        // "prime * (int)(...)" rather than "prime * result + (int)(...)").
        // Harmless to the equals/hashCode contract since result is 1 here,
        // but any change must come from the LoxiGen generator, not by hand.
        result = prime *  (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        return result;
    }

}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import java.io.IOException; 
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

/**
 * Top level suggest result, containing the result for each suggestion.
 */
public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Writeable, ToXContentFragment {

    public static final String NAME = "suggest";

    // Orders options by descending score; ties are broken by ascending option text.
    public static final Comparator<Option> COMPARATOR = (first, second) -> {
        int cmp = Float.compare(second.getScore(), first.getScore());
        if (cmp != 0) {
            return cmp;
        }
        return first.getText().compareTo(second.getText());
    };

    private final List<Suggestion<? extends Entry<? extends Option>>> suggestions;
    private final boolean hasScoreDocs;

    // Lazily built name -> suggestion lookup; only populated by getSuggestion() when
    // there is more than one suggestion (single-element lists are matched directly).
    private Map<String, Suggestion<? extends Entry<? extends Option>>> suggestMap;

    public Suggest(List<Suggestion<? extends Entry<? extends Option>>> suggestions) {
        // we sort suggestions by their names to ensure iteration over suggestions are consistent
        // this is needed as we need to fill in suggestion docs in SearchPhaseController#sortDocs
        // in the same order as we enrich the suggestions with fetch results in SearchPhaseController#merge
        suggestions.sort((o1, o2) -> o1.getName().compareTo(o2.getName()));
        this.suggestions = suggestions;
        this.hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
    }

    /**
     * Deserializes a {@link Suggest} from the wire. The suggestions were written
     * sorted (see the list constructor), so order is preserved.
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public Suggest(StreamInput in) throws IOException {
        suggestions = (List) in.readNamedWriteableList(Suggestion.class);
        hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
    }

    @Override
    public Iterator<Suggestion<? extends Entry<? extends Option>>> iterator() {
        return suggestions.iterator();
    }

    /**
     * The number of suggestions in this {@link Suggest} result
     */
    public int size() {
        return suggestions.size();
    }

    /**
     * Looks up a suggestion by its request-defined name, or {@code null} if absent.
     */
    @SuppressWarnings("unchecked")
    public <T extends Suggestion<? extends Entry<? extends Option>>> T getSuggestion(String name) {
        if (suggestions.isEmpty() || name == null) {
            return null;
        } else if (suggestions.size() == 1) {
            return (T) (name.equals(suggestions.get(0).name) ? suggestions.get(0) : null);
        } else if (this.suggestMap == null) {
            suggestMap = new HashMap<>();
            for (Suggest.Suggestion<? extends Entry<? extends Option>> item : suggestions) {
                suggestMap.put(item.getName(), item);
            }
        }
        return (T) suggestMap.get(name);
    }

    /**
     * Whether any suggestions had query hits
     */
    public boolean hasScoreDocs() {
        return hasScoreDocs;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeNamedWriteableList(suggestions);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        for (Suggestion<?> suggestion : suggestions) {
            suggestion.toXContent(builder, params);
        }
        builder.endObject();
        return builder;
    }

    /**
     * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller
     */
    public static Suggest fromXContent(XContentParser parser) throws IOException {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
        List<Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>();
        while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            // each suggestion is keyed by its (possibly type-prefixed) name and holds an array of entries
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
            String currentField = parser.currentName();
            ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser);
            Suggestion<? extends Entry<? extends Option>> suggestion = Suggestion.fromXContent(parser);
            if (suggestion != null) {
                suggestions.add(suggestion);
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField));
            }
        }
        return new Suggest(suggestions);
    }

    /**
     * Merges per-shard suggestion results, grouped by suggestion name, into a single
     * reduced list. All results under one name must come from the same suggester type.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public static List<Suggestion<? extends Entry<? extends Option>>> reduce(Map<String, List<Suggest.Suggestion>> groupedSuggestions) {
        List<Suggestion<? extends Entry<? extends Option>>> reduced = new ArrayList<>(groupedSuggestions.size());
        for (Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) {
            List<Suggestion> value = unmergedResults.getValue();
            Class<? extends Suggestion> suggestionClass = null;
            for (Suggestion suggestion : value) {
                if (suggestionClass == null) {
                    suggestionClass = suggestion.getClass();
                } else if (suggestionClass != suggestion.getClass()) {
                    throw new IllegalArgumentException(
                            "detected mixed suggestion results, due to querying on old and new completion suggester," +
                                    " query on a single completion suggester version");
                }
            }
            Suggestion reduce = value.get(0).reduce(value);
            reduce.trim();
            reduced.add(reduce);
        }
        return reduced;
    }

    /**
     * @return only suggestions of type <code>suggestionType</code> contained in this {@link Suggest} instance
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public <T extends Suggestion> List<T> filter(Class<T> suggestionType) {
        return suggestions.stream()
                .filter(suggestion -> suggestion.getClass() == suggestionType)
                .map(suggestion -> (T) suggestion)
                .collect(Collectors.toList());
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        return Objects.equals(suggestions, ((Suggest) other).suggestions);
    }

    @Override
    public int hashCode() {
        return Objects.hash(suggestions);
    }

    /**
     * The suggestion responses corresponding with the suggestions in the request.
     */
    @SuppressWarnings("rawtypes")
    public abstract static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, NamedWriteable, ToXContentFragment {

        public static final int TYPE = 0;
        protected final String name;
        protected final int size;
        protected final List<T> entries = new ArrayList<>(5);

        public Suggestion(String name, int size) {
            this.name = name;
            this.size = size; // The suggested term size specified in request, only used for merging shard responses
        }

        public Suggestion(StreamInput in) throws IOException {
            // wire order: name, size, entry count, then the entries themselves
            name = in.readString();
            size = in.readVInt();
            int entriesCount = in.readVInt();
            entries.clear();
            for (int i = 0; i < entriesCount; i++) {
                T newEntry = newEntry(in);
                entries.add(newEntry);
            }
        }

        public void addTerm(T entry) {
            entries.add(entry);
        }

        /**
         * Returns a integer representing the type of the suggestion. This is used for
         * internal serialization over the network.
         *
         * This class is now serialized as a NamedWriteable and this method only remains for backwards compatibility
         */
        @Deprecated
        public int getWriteableType() {
            return TYPE;
        }

        @Override
        public Iterator<T> iterator() {
            return entries.iterator();
        }

        /**
         * @return The entries for this suggestion.
         */
        public List<T> getEntries() {
            return entries;
        }

        /**
         * @return The name of the suggestion as is defined in the request.
         */
        public String getName() {
            return name;
        }

        /**
         * @return The number of requested suggestion option size
         */
        public int getSize() {
            return size;
        }

        /**
         * Merges the result of another suggestion into this suggestion.
         * For internal usage.
         */
        @SuppressWarnings("unchecked")
        public Suggestion<T> reduce(List<Suggestion<T>> toReduce) {
            if (toReduce.size() == 1) {
                return toReduce.get(0);
            } else if (toReduce.isEmpty()) {
                return null;
            }
            // merge entry-by-entry into the first suggestion ("leader"); every shard
            // response is expected to have the same number of entries in the same order
            Suggestion<T> leader = toReduce.get(0);
            List<T> entries = leader.entries;
            final int size = entries.size();
            Comparator<Option> sortComparator = sortComparator();
            List<T> currentEntries = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                for (Suggestion<T> suggestion : toReduce) {
                    if (suggestion.entries.size() != size) {
                        throw new IllegalStateException("Can't merge suggest result, this might be caused by suggest calls " +
                                "across multiple indices with different analysis chains. Suggest entries have different sizes actual [" +
                                suggestion.entries.size() + "] expected [" + size + "]");
                    }
                    assert suggestion.name.equals(leader.name);
                    currentEntries.add(suggestion.entries.get(i));
                }
                T entry = (T) entries.get(i).reduce(currentEntries);
                entry.sort(sortComparator);
                entries.set(i, entry);
                currentEntries.clear();
            }
            return leader;
        }

        protected Comparator<Option> sortComparator() {
            return COMPARATOR;
        }

        /**
         * Trims the number of options per suggest text term to the requested size.
         * For internal usage.
         */
        public void trim() {
            for (Entry<?> entry : entries) {
                entry.trim(size);
            }
        }

        protected abstract T newEntry(StreamInput in) throws IOException;

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // must mirror the read order in Suggestion(StreamInput)
            out.writeString(name);
            out.writeVInt(size);
            out.writeList(entries);
        }

        @Override
        public abstract String getWriteableName();

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (params.paramAsBoolean(RestSearchAction.TYPED_KEYS_PARAM, false)) {
                // Concatenates the type and the name of the suggestion (ex: completion#foo)
                builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getWriteableName(), getName()));
            } else {
                builder.startArray(getName());
            }
            for (Entry<?> entry : entries) {
                builder.startObject();
                entry.toXContent(builder, params);
                builder.endObject();
            }
            builder.endArray();
            return builder;
        }

        @Override
        @SuppressWarnings("rawtypes")
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other == null || getClass() != other.getClass()) {
                return false;
            }
            Suggestion otherSuggestion = (Suggestion) other;
            return Objects.equals(name, otherSuggestion.name)
                    && Objects.equals(size, otherSuggestion.size)
                    && Objects.equals(entries, otherSuggestion.entries);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, size, entries);
        }

        @SuppressWarnings("unchecked")
        public static Suggestion<? extends Entry<? extends Option>> fromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
            SetOnce<Suggestion> suggestion = new SetOnce<>();
            XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class, suggestion::set);
            return suggestion.get();
        }

        protected static <E extends Suggestion.Entry<?>> void parseEntries(XContentParser parser, Suggestion<E> suggestion,
                CheckedFunction<XContentParser, E, IOException> entryParser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
            while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                suggestion.addTerm(entryParser.apply(parser));
            }
        }

        /**
         * Represents a part from the suggest text with suggested options.
         */
        public abstract static class Entry<O extends Option> implements Iterable<O>, Writeable, ToXContentFragment {

            private static final String TEXT = "text";
            private static final String OFFSET = "offset";
            private static final String LENGTH = "length";
            protected static final String OPTIONS = "options";

            protected Text text;
            protected int offset;
            protected int length;

            protected List<O> options = new ArrayList<>(5);

            public Entry(Text text, int offset, int length) {
                this.text = text;
                this.offset = offset;
                this.length = length;
            }

            protected Entry() {}

            public Entry(StreamInput in) throws IOException {
                // wire order: text, offset, length, option count, then the options
                text = in.readText();
                offset = in.readVInt();
                length = in.readVInt();
                int suggestedWords = in.readVInt();
                options = new ArrayList<>(suggestedWords);
                for (int j = 0; j < suggestedWords; j++) {
                    O newOption = newOption(in);
                    options.add(newOption);
                }
            }

            public void addOption(O option) {
                options.add(option);
            }

            protected void addOptions(List<O> options) {
                for (O option : options) {
                    addOption(option);
                }
            }

            protected void sort(Comparator<O> comparator) {
                CollectionUtil.timSort(options, comparator);
            }

            protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) {
                if (toReduce.size() == 1) {
                    return toReduce.get(0);
                }
                // options compare equal by text only (see Option.equals), so this map
                // deduplicates options across shards while merging scores via mergeInto
                final Map<O, O> entries = new HashMap<>();
                Entry<O> leader = toReduce.get(0);
                for (Entry<O> entry : toReduce) {
                    if (!leader.text.equals(entry.text)) {
                        throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " +
                                "across multiple indices with different analysis chains. Suggest entries have different text actual [" +
                                entry.text + "] expected [" + leader.text + "]");
                    }
                    assert leader.offset == entry.offset;
                    assert leader.length == entry.length;
                    leader.merge(entry);
                    for (O option : entry) {
                        O merger = entries.get(option);
                        if (merger == null) {
                            entries.put(option, option);
                        } else {
                            merger.mergeInto(option);
                        }
                    }
                }
                leader.options.clear();
                for (O option : entries.keySet()) {
                    leader.addOption(option);
                }
                return leader;
            }

            /**
             * Merge any extra fields for this subtype.
             */
            protected void merge(Entry<O> other) {
            }

            /**
             * @return the text (analyzed by suggest analyzer) originating from the suggest text. Usually this is a
             *         single term.
             */
            public Text getText() {
                return text;
            }

            /**
             * @return the start offset (not analyzed) for this entry in the suggest text.
             */
            public int getOffset() {
                return offset;
            }

            /**
             * @return the length (not analyzed) for this entry in the suggest text.
             */
            public int getLength() {
                return length;
            }

            @Override
            public Iterator<O> iterator() {
                return options.iterator();
            }

            /**
             * @return The suggested options for this particular suggest entry. If there are no suggested terms then
             *         an empty list is returned.
             */
            public List<O> getOptions() {
                return options;
            }

            // Drops trailing options so at most `size` remain; options are kept sorted,
            // so the lowest-ranked ones are removed.
            void trim(int size) {
                int optionsToRemove = Math.max(0, options.size() - size);
                for (int i = 0; i < optionsToRemove; i++) {
                    options.remove(options.size() - 1);
                }
            }

            @Override
            public boolean equals(Object o) {
                if (this == o) {
                    return true;
                }
                if (o == null || getClass() != o.getClass()) {
                    return false;
                }
                Entry<?> entry = (Entry<?>) o;
                return Objects.equals(length, entry.length)
                        && Objects.equals(offset, entry.offset)
                        && Objects.equals(text, entry.text)
                        && Objects.equals(options, entry.options);
            }

            @Override
            public int hashCode() {
                return Objects.hash(text, offset, length, options);
            }

            protected abstract O newOption(StreamInput in) throws IOException;

            @Override
            public void writeTo(StreamOutput out) throws IOException {
                // must mirror the read order in Entry(StreamInput)
                out.writeText(text);
                out.writeVInt(offset);
                out.writeVInt(length);
                out.writeVInt(options.size());
                for (Option option : options) {
                    option.writeTo(out);
                }
            }

            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.field(TEXT, text);
                builder.field(OFFSET, offset);
                builder.field(LENGTH, length);
                builder.startArray(OPTIONS);
                for (Option option : options) {
                    builder.startObject();
                    option.toXContent(builder, params);
                    builder.endObject();
                }
                builder.endArray();
                return builder;
            }

            protected static void declareCommonFields(ObjectParser<? extends Entry<? extends Option>, Void> parser) {
                parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT));
                parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET));
                parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH));
            }

            /**
             * Contains the suggested text with its document frequency and score.
             */
            public abstract static class Option implements Writeable, ToXContentFragment {

                public static final ParseField TEXT = new ParseField("text");
                public static final ParseField HIGHLIGHTED = new ParseField("highlighted");
                public static final ParseField SCORE = new ParseField("score");
                public static final ParseField COLLATE_MATCH = new ParseField("collate_match");

                private final Text text;
                private final Text highlighted;
                private float score;
                private Boolean collateMatch;

                public Option(Text text, Text highlighted, float score, Boolean collateMatch) {
                    this.text = text;
                    this.highlighted = highlighted;
                    this.score = score;
                    this.collateMatch = collateMatch;
                }

                public Option(Text text, Text highlighted, float score) {
                    this(text, highlighted, score, null);
                }

                public Option(Text text, float score) {
                    this(text, null, score);
                }

                public Option(StreamInput in) throws IOException {
                    // wire order: text, score, optional highlighted, optional collateMatch
                    text = in.readText();
                    score = in.readFloat();
                    highlighted = in.readOptionalText();
                    collateMatch = in.readOptionalBoolean();
                }

                /**
                 * @return The actual suggested text.
                 */
                public Text getText() {
                    return text;
                }

                /**
                 * @return Copy of suggested text with changes from user supplied text highlighted.
                 */
                public Text getHighlighted() {
                    return highlighted;
                }

                /**
                 * @return The score based on the edit distance difference between the suggested term and the
                 *         term in the suggest text.
                 */
                public float getScore() {
                    return score;
                }

                /**
                 * @return true if collation has found a match for the entry.
                 *         if collate was not set, the value defaults to <code>true</code>
                 */
                public boolean collateMatch() {
                    return (collateMatch != null) ? collateMatch : true;
                }

                protected void setScore(float score) {
                    this.score = score;
                }

                @Override
                public void writeTo(StreamOutput out) throws IOException {
                    // must mirror the read order in Option(StreamInput)
                    out.writeText(text);
                    out.writeFloat(score);
                    out.writeOptionalText(highlighted);
                    out.writeOptionalBoolean(collateMatch);
                }

                @Override
                public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                    builder.field(TEXT.getPreferredName(), text);
                    if (highlighted != null) {
                        builder.field(HIGHLIGHTED.getPreferredName(), highlighted);
                    }
                    builder.field(SCORE.getPreferredName(), score);
                    if (collateMatch != null) {
                        builder.field(COLLATE_MATCH.getPreferredName(), collateMatch.booleanValue());
                    }
                    return builder;
                }

                // Folds another shard's copy of the same option (equal text) into this one:
                // keep the best score and OR the collate-match flags.
                protected void mergeInto(Option otherOption) {
                    score = Math.max(score, otherOption.score);
                    if (otherOption.collateMatch != null) {
                        if (collateMatch == null) {
                            collateMatch = otherOption.collateMatch;
                        } else {
                            collateMatch |= otherOption.collateMatch;
                        }
                    }
                }

                /*
                 * We consider options equal if they have the same text, even if their other fields may differ
                 */
                @Override
                public boolean equals(Object o) {
                    if (this == o) {
                        return true;
                    }
                    if (o == null || getClass() != o.getClass()) {
                        return false;
                    }
                    Option that = (Option) o;
                    return Objects.equals(text, that.text);
                }

                @Override
                public int hashCode() {
                    return Objects.hash(text);
                }
            }
        }
    }

    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }
}
/* * Copyright 2000-2016 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.server; import java.io.Serializable; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import com.vaadin.shared.VBrowserDetails; /** * Class that provides information about the web browser the user is using. * Provides information such as browser name and version, screen resolution and * IP address. * * @author Vaadin Ltd. */ public class WebBrowser implements Serializable { private int screenHeight = -1; private int screenWidth = -1; private String browserApplication = null; private Locale locale; private String address; private boolean secureConnection; private int timezoneOffset = 0; private int rawTimezoneOffset = 0; private int dstSavings; private boolean dstInEffect; private String timeZoneId; private boolean touchDevice; private VBrowserDetails browserDetails; private long clientServerTimeDelta; /** * Gets the height of the screen in pixels. This is the full screen * resolution and not the height available for the application. * * @return the height of the screen in pixels. */ public int getScreenHeight() { return screenHeight; } /** * Gets the width of the screen in pixels. This is the full screen * resolution and not the width available for the application. * * @return the width of the screen in pixels. */ public int getScreenWidth() { return screenWidth; } /** * Get the browser user-agent string. 
* * @return The raw browser userAgent string */ public String getBrowserApplication() { return browserApplication; } /** * Gets the IP-address of the web browser. If the application is running * inside a portlet, this method will return null. * * @return IP-address in 1.12.123.123 -format */ public String getAddress() { return address; } /** Get the default locate of the browser. */ public Locale getLocale() { return locale; } /** Is the connection made using HTTPS? */ public boolean isSecureConnection() { return secureConnection; } /** * Tests whether the user is using Firefox. * * @return true if the user is using Firefox, false if the user is not using * Firefox or if no information on the browser is present */ public boolean isFirefox() { if (browserDetails == null) { return false; } return browserDetails.isFirefox(); } /** * Tests whether the user is using Internet Explorer. * * @return true if the user is using Internet Explorer, false if the user is * not using Internet Explorer or if no information on the browser * is present */ public boolean isIE() { if (browserDetails == null) { return false; } return browserDetails.isIE(); } /** * Tests whether the user is using Edge. * * @since 7.5.3 * @return true if the user is using Edge, false if the user is not using * Edge or if no information on the browser is present */ public boolean isEdge() { if (browserDetails == null) { return false; } return browserDetails.isEdge(); } /** * Tests whether the user is using Safari. Note that Chrome on iOS is not * detected as Safari but as Chrome although the underlying browser engine * is the same. * * @return true if the user is using Safari, false if the user is not using * Safari or if no information on the browser is present */ public boolean isSafari() { if (browserDetails == null) { return false; } return browserDetails.isSafari(); } /** * Tests whether the user is using Opera. 
* * @return true if the user is using Opera, false if the user is not using * Opera or if no information on the browser is present */ public boolean isOpera() { if (browserDetails == null) { return false; } return browserDetails.isOpera(); } /** * Tests whether the user is using Chrome. * * @return true if the user is using Chrome, false if the user is not using * Chrome or if no information on the browser is present */ public boolean isChrome() { if (browserDetails == null) { return false; } return browserDetails.isChrome(); } /** * Tests whether the user is using Chrome Frame. * * @return true if the user is using Chrome Frame, false if the user is not * using Chrome or if no information on the browser is present */ public boolean isChromeFrame() { if (browserDetails == null) { return false; } return browserDetails.isChromeFrame(); } /** * Tests whether the user's browser is Chrome Frame capable. * * @return true if the user can use Chrome Frame, false if the user can not * or if no information on the browser is present */ public boolean isChromeFrameCapable() { if (browserDetails == null) { return false; } return browserDetails.isChromeFrameCapable(); } /** * Gets the major version of the browser the user is using. * * <p> * Note that Internet Explorer in IE7 compatibility mode might return 8 in * some cases even though it should return 7. * </p> * * @return The major version of the browser or -1 if not known. */ public int getBrowserMajorVersion() { if (browserDetails == null) { return -1; } return browserDetails.getBrowserMajorVersion(); } /** * Gets the minor version of the browser the user is using. * * @see #getBrowserMajorVersion() * * @return The minor version of the browser or -1 if not known. */ public int getBrowserMinorVersion() { if (browserDetails == null) { return -1; } return browserDetails.getBrowserMinorVersion(); } /** * Gets the complete browser version as string. 
The version is given by the * browser through the user agent string and usually consists of * dot-separated numbers. Note that the string may contain characters other * than dots and digits. * * @return the complete browser version or {@code null} if unknown * @since */ public String getBrowserVersion() { return browserDetails != null ? browserDetails.getBrowserVersion() : null; } /** * Tests whether the user is using Linux. * * @return true if the user is using Linux, false if the user is not using * Linux or if no information on the browser is present */ public boolean isLinux() { return browserDetails.isLinux(); } /** * Tests whether the user is using Mac OS X. * * @return true if the user is using Mac OS X, false if the user is not * using Mac OS X or if no information on the browser is present */ public boolean isMacOSX() { return browserDetails.isMacOSX(); } /** * Tests whether the user is using Windows. * * @return true if the user is using Windows, false if the user is not using * Windows or if no information on the browser is present */ public boolean isWindows() { return browserDetails.isWindows(); } /** * Tests whether the user is using Windows Phone. * * @return true if the user is using Windows Phone, false if the user is not * using Windows Phone or if no information on the browser is * present * @since 7.3.2 */ public boolean isWindowsPhone() { return browserDetails.isWindowsPhone(); } /** * Tests if the browser is run on Android. * * @return true if run on Android false if the user is not using Android or * if no information on the browser is present */ public boolean isAndroid() { return browserDetails.isAndroid(); } /** * Tests if the browser is run in iOS. * * @return true if run in iOS false if the user is not using iOS or if no * information on the browser is present */ public boolean isIOS() { return browserDetails.isIOS(); } /** * Tests if the browser is run on IPhone. 
* * @return true if run on IPhone false if the user is not using IPhone or if * no information on the browser is present * @since 7.3.3 */ public boolean isIPhone() { return browserDetails.isIPhone(); } /** * Tests if the browser is run on IPad. * * @return true if run on IPad false if the user is not using IPad or if no * information on the browser is present * @since 7.3.3 */ public boolean isIPad() { return browserDetails.isIPad(); } /** * Tests if the browser is run on ChromeOS (e.g. a Chromebook). * * @return true if run on ChromeOS false if the user is not using ChromeOS * or if no information on the browser is present * @since 8.1.1 */ public boolean isChromeOS() { return browserDetails.isChromeOS(); } /** * Returns the browser-reported TimeZone offset in milliseconds from GMT. * This includes possible daylight saving adjustments, to figure out which * TimeZone the user actually might be in, see * {@link #getRawTimezoneOffset()}. * * @see WebBrowser#getRawTimezoneOffset() * @return timezone offset in milliseconds, 0 if not available */ public int getTimezoneOffset() { return timezoneOffset; } /** * Returns the TimeZone Id (like "Europe/Helsinki") provided by the browser * (if the browser supports this feature). * * @return the TimeZone Id if provided by the browser, null otherwise. * @see <a href= * "https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DateTimeFormat/resolvedOptions">Intl.DateTimeFormat.prototype.resolvedOptions()</a> * @since 8.2 */ public String getTimeZoneId() { return timeZoneId; } /** * Returns the browser-reported TimeZone offset in milliseconds from GMT * ignoring possible daylight saving adjustments that may be in effect in * the browser. * <p> * You can use this to figure out which TimeZones the user could actually be * in by calling {@link TimeZone#getAvailableIDs(int)}. 
* </p> * <p> * If {@link #getRawTimezoneOffset()} and {@link #getTimezoneOffset()} * returns the same value, the browser is either in a zone that does not * currently have daylight saving time, or in a zone that never has daylight * saving time. * </p> * * @return timezone offset in milliseconds excluding DST, 0 if not available */ public int getRawTimezoneOffset() { return rawTimezoneOffset; } /** * Returns the offset in milliseconds between the browser's GMT TimeZone and * DST. * * @return the number of milliseconds that the TimeZone shifts when DST is * in effect */ public int getDSTSavings() { return dstSavings; } /** * Returns whether daylight saving time (DST) is currently in effect in the * region of the browser or not. * * @return true if the browser resides at a location that currently is in * DST */ public boolean isDSTInEffect() { return dstInEffect; } /** * Returns the current date and time of the browser. This will not be * entirely accurate due to varying network latencies, but should provide a * close-enough value for most cases. Also note that the returned Date * object uses servers default time zone, not the clients. * <p> * To get the actual date and time shown in the end users computer, you can * do something like: * * <pre> * WebBrowser browser = ...; * SimpleTimeZone timeZone = new SimpleTimeZone(browser.getTimezoneOffset(), "Fake client time zone"); * DateFormat format = DateFormat.getDateTimeInstance(); * format.setTimeZone(timeZone); * myLabel.setValue(format.format(browser.getCurrentDate())); * </pre> * * @return the current date and time of the browser. * @see #isDSTInEffect() * @see #getDSTSavings() * @see #getTimezoneOffset() */ public Date getCurrentDate() { return new Date(new Date().getTime() + clientServerTimeDelta); } /** * @return true if the browser is detected to support touch events */ public boolean isTouchDevice() { return touchDevice; } /** * For internal use by VaadinServlet/VaadinPortlet only. 
Updates all * properties in the class according to the given information. * * @param sw * Screen width * @param sh * Screen height * @param tzo * TimeZone offset in minutes from GMT * @param rtzo * raw TimeZone offset in minutes from GMT (w/o DST adjustment) * @param dstSavings * the difference between the raw TimeZone and DST in minutes * @param dstInEffect * is DST currently active in the region or not? * @param curDate * the current date in milliseconds since the epoch * @param touchDevice */ void updateClientSideDetails(String sw, String sh, String tzo, String rtzo, String dstSavings, String dstInEffect, String tzId, String curDate, boolean touchDevice) { if (sw != null) { try { screenHeight = Integer.parseInt(sh); screenWidth = Integer.parseInt(sw); } catch (final NumberFormatException e) { screenHeight = screenWidth = -1; } } if (tzo != null) { try { // browser->java conversion: min->ms, reverse sign timezoneOffset = -Integer.parseInt(tzo) * 60 * 1000; } catch (final NumberFormatException e) { timezoneOffset = 0; // default gmt+0 } } if (rtzo != null) { try { // browser->java conversion: min->ms, reverse sign rawTimezoneOffset = -Integer.parseInt(rtzo) * 60 * 1000; } catch (final NumberFormatException e) { rawTimezoneOffset = 0; // default gmt+0 } } if (dstSavings != null) { try { // browser->java conversion: min->ms this.dstSavings = Integer.parseInt(dstSavings) * 60 * 1000; } catch (final NumberFormatException e) { this.dstSavings = 0; // default no savings } } if (dstInEffect != null) { this.dstInEffect = Boolean.parseBoolean(dstInEffect); } if (tzId == null || "undefined".equals(tzId)) { timeZoneId = null; } else { timeZoneId = tzId; } if (curDate != null) { try { long curTime = Long.parseLong(curDate); clientServerTimeDelta = curTime - new Date().getTime(); } catch (final NumberFormatException e) { clientServerTimeDelta = 0; } } this.touchDevice = touchDevice; } /** * For internal use by VaadinServlet/VaadinPortlet only. 
Updates all * properties in the class according to the given information. * * @param request * the Vaadin request to read the information from */ public void updateRequestDetails(VaadinRequest request) { locale = request.getLocale(); address = request.getRemoteAddr(); secureConnection = request.isSecure(); // Headers are case insensitive according to the specification but are // case sensitive in Weblogic portal... String agent = request.getHeader("User-Agent"); if (agent != null) { browserApplication = agent; browserDetails = new VBrowserDetails(agent); } if (request.getParameter("v-sw") != null) { updateClientSideDetails(request.getParameter("v-sw"), request.getParameter("v-sh"), request.getParameter("v-tzo"), request.getParameter("v-rtzo"), request.getParameter("v-dstd"), request.getParameter("v-dston"), request.getParameter("v-tzid"), request.getParameter("v-curdate"), request.getParameter("v-td") != null); } } /** * Checks if the browser is so old that it simply won't work with a Vaadin * application. Can be used to redirect to an alternative page, show * alternative content or similar. * * When this method returns true chances are very high that the browser * won't work and it does not make sense to direct the user to the Vaadin * application. * * @return true if the browser won't work, false if not the browser is * supported or might work */ public boolean isTooOldToFunctionProperly() { if (browserDetails == null) { // Don't know, so assume it will work return false; } return browserDetails.isTooOldToFunctionProperly(); } /** * Checks if the browser supports ECMAScript 6, based on the user agent. * * @return <code>true</code> if the browser supports ES6, <code>false</code> * otherwise. * @since 8.1 */ public boolean isEs6Supported() { if (browserDetails == null) { // Don't know, so assume no return false; } return browserDetails.isEs6Supported(); } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.auth.profile;

import java.io.File;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.Map;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.profile.internal.securitytoken.ProfileCredentialsService;
import com.amazonaws.auth.profile.internal.securitytoken.RoleInfo;
import com.amazonaws.internal.StaticCredentialsProvider;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.profile.internal.Profile;

/**
 * Tests for {@link ProfileCredentialsProvider}: profile selection (default,
 * environment variable, system property, explicit override), credential
 * refresh on file modification, and assume-role profile handling.
 */
public class ProfileCredentialsProviderTest {
    private static final String DEFAULT_PROFILE_NAME = "default";

    // Fixture whose access/secret keys are intentionally swapped; see
    // testDefault() below.
    private static String PROFILE_FILE = "ProfilesContainingOtherConfigurations.tst";
    private static String profileLocation = null;

    @BeforeClass
    public static void setUp() {
        profileLocation = getLocationForTestFile(PROFILE_FILE);
    }

    /** Resolves a classpath test fixture to a filesystem path. */
    private static String getLocationForTestFile(String name) {
        URL url = ProfileCredentialsProviderTest.class
                .getResource("/resources/profileconfig/" + name);
        return url.getFile();
    }

    /** With no override, the "default" profile from the file is used. */
    @Test
    public void testDefault() {
        ProfileCredentialsProvider provider =
                new ProfileCredentialsProvider(profileLocation, null);

        AWSCredentials credentials = provider.getCredentials();

        // Yep, this is correct - they're backwards in
        // ProfilesContainingOtherConfigurations.tst
        Assert.assertEquals("defaultSecretAccessKey", credentials.getAWSAccessKeyId());
        Assert.assertEquals("defaultAccessKey", credentials.getAWSSecretKey());
    }

    /**
     * The AWS_PROFILE environment variable selects the profile.
     *
     * NOTE(review): this mutates the map behind the unmodifiable
     * System.getenv() view via reflection on
     * java.util.Collections$UnmodifiableMap. That is JVM-implementation
     * specific and may fail under newer JDKs with stronger reflective
     * encapsulation — TODO confirm on the supported JDK matrix.
     */
    @Test
    public void testEnvironmentVariable() throws Exception {
        Map<String, String> immutableEnv = System.getenv();
        Class<?> unMap = Class.forName("java.util.Collections$UnmodifiableMap");
        Field m = unMap.getDeclaredField("m");
        m.setAccessible(true);
        @SuppressWarnings("unchecked")
        Map<String, String> env = (Map<String, String>) m.get(immutableEnv);
        try {
            env.put(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE, "test");
            ProfileCredentialsProvider provider =
                    new ProfileCredentialsProvider(profileLocation, null);

            AWSCredentials credentials = provider.getCredentials();
            Assert.assertEquals("test", credentials.getAWSAccessKeyId());
            Assert.assertEquals("test key", credentials.getAWSSecretKey());
        } finally {
            // Always undo the env mutation so other tests see a clean state.
            env.remove(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE);
        }
    }

    /** The aws.profile system property selects the profile. */
    @Test
    public void testSystemProperty() {
        try {
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "test");
            ProfileCredentialsProvider provider =
                    new ProfileCredentialsProvider(profileLocation, null);

            AWSCredentials credentials = provider.getCredentials();
            Assert.assertEquals("test", credentials.getAWSAccessKeyId());
            Assert.assertEquals("test key", credentials.getAWSSecretKey());
        } finally {
            // NOTE(review): resets to "" rather than clearing the property;
            // downstream code apparently treats empty as unset.
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "");
        }
    }

    /** When both env var and system property are set, the env var wins. */
    @Test
    public void testBoth() throws Exception {
        Map<String, String> immutableEnv = System.getenv();
        Class<?> unMap = Class.forName("java.util.Collections$UnmodifiableMap");
        Field m = unMap.getDeclaredField("m");
        m.setAccessible(true);
        @SuppressWarnings("unchecked")
        Map<String, String> env = (Map<String, String>) m.get(immutableEnv);
        try {
            // If both are set, env var should take precedence.
            env.put(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE, "test");
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "bogus");

            ProfileCredentialsProvider provider =
                    new ProfileCredentialsProvider(profileLocation, null);

            AWSCredentials credentials = provider.getCredentials();
            Assert.assertEquals("test", credentials.getAWSAccessKeyId());
            Assert.assertEquals("test key", credentials.getAWSSecretKey());
        } finally {
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "");
            env.remove(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE);
        }
    }

    /**
     * An explicit profile name passed to the constructor beats both the env
     * var and the system property; a nonexistent profile fails fast.
     */
    @Test
    public void testExplicit() throws Exception {
        Map<String, String> immutableEnv = System.getenv();
        Class<?> unMap = Class.forName("java.util.Collections$UnmodifiableMap");
        Field m = unMap.getDeclaredField("m");
        m.setAccessible(true);
        @SuppressWarnings("unchecked")
        Map<String, String> env = (Map<String, String>) m.get(immutableEnv);
        try {
            env.put(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE, "test");
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "test");

            // If an explicit override is provided, that beats anything else.
            ProfileCredentialsProvider provider =
                    new ProfileCredentialsProvider(profileLocation, "bogus");

            try {
                provider.getCredentials();
                Assert.fail("Expected IllegalArgumentException");
            } catch (IllegalArgumentException expected) {
                // "bogus" does not exist in the profiles file.
            }
        } finally {
            System.setProperty(
                ProfilesConfigFile.AWS_PROFILE_SYSTEM_PROPERTY,
                "");
            env.remove(ProfilesConfigFile.AWS_PROFILE_ENVIRONMENT_VARIABLE);
        }
    }

    /** refresh() picks up credentials modified on disk. */
    @Test
    public void testUpdate() throws Exception {
        ProfilesConfigFile fixture = new ProfilesConfigFile(getLocationForTestFile("BasicProfile.tst"));
        File modifiable = File.createTempFile("UpdatableProfile", ".tst");
        // NOTE(review): toArray(new Profile[1]) leaves a null element when the
        // collection is empty; toArray(new Profile[0]) is the idiomatic form.
        ProfilesConfigFileWriter.dumpToFile(modifiable, true, fixture.getAllProfiles().values().toArray(new Profile[1]));

        ProfileCredentialsProvider test = new ProfileCredentialsProvider(modifiable.getPath(), null);
        AWSCredentials orig = test.getCredentials();
        Assert.assertEquals("defaultAccessKey", orig.getAWSAccessKeyId());
        Assert.assertEquals("defaultSecretAccessKey", orig.getAWSSecretKey());

        //Sleep to ensure that the timestamp on the file (when we modify it) is
        //distinguishably later from the original write.
        try {
            Thread.sleep(2000);
        } catch (Exception e) {}

        ProfilesConfigFileWriter.modifyOneProfile(modifiable, DEFAULT_PROFILE_NAME, new Profile(DEFAULT_PROFILE_NAME, new BasicAWSCredentials("newAccessKey", "newSecretKey")));
        test.refresh();
        AWSCredentials updated = test.getCredentials();
        Assert.assertEquals("newAccessKey", updated.getAWSAccessKeyId());
        Assert.assertEquals("newSecretKey", updated.getAWSSecretKey());
    }

    /** A tiny forced-refresh interval makes getCredentials() reload the file. */
    @Test
    public void testForcedRefresh() throws Exception {
        ProfilesConfigFile profilesConfigFileBeforeRefresh = new ProfilesConfigFile(getLocationForTestFile("BasicProfile.tst"));
        File profilesFile = File.createTempFile("UpdatableProfile", ".tst");
        ProfilesConfigFileWriter.dumpToFile(profilesFile, true, profilesConfigFileBeforeRefresh.getAllProfiles().values().toArray(new Profile[1]));
        ProfileCredentialsProvider profileCredentialsProvider = new ProfileCredentialsProvider(profilesFile.getPath(), null);
        /*
         * Sleep for 1 second so that the profiles file last modified time has a chance to update.
         * If this wait is not here, com.amazonaws.auth.profile.ProfilesConfigFile.refresh() profileFile.lastModified() will not be updated, therefore the
         * credentials will not refresh.
         *
         * This is also in testRefresh()
         */
        Thread.sleep(1000);
        ProfilesConfigFile profilesConfigFileAfterRefresh = new ProfilesConfigFile(getLocationForTestFile("BasicProfile2.tst"));
        ProfilesConfigFileWriter.dumpToFile(profilesFile, true, profilesConfigFileAfterRefresh.getAllProfiles().values().toArray(new Profile[1]));
        profileCredentialsProvider.setRefreshForceIntervalNanos(1l);
        AWSCredentials credentialsAfterRefresh = profileCredentialsProvider.getCredentials();
        Assert.assertEquals("credentialsAfterRefresh AWSAccessKeyId", "accessKey2", credentialsAfterRefresh.getAWSAccessKeyId());
        Assert.assertEquals("credentialsAfterRefresh AWSSecretKey", "secretAccessKey2", credentialsAfterRefresh.getAWSSecretKey());
    }

    /** Same as testForcedRefresh() but via the non-forced refresh interval. */
    @Test
    public void testRefresh() throws Exception {
        ProfilesConfigFile profilesConfigFileBeforeRefresh = new ProfilesConfigFile(getLocationForTestFile("BasicProfile.tst"));
        File profilesFile = File.createTempFile("UpdatableProfile", ".tst");
        ProfilesConfigFileWriter.dumpToFile(profilesFile, true, profilesConfigFileBeforeRefresh.getAllProfiles().values().toArray(new Profile[1]));
        ProfileCredentialsProvider profileCredentialsProvider = new ProfileCredentialsProvider(profilesFile.getPath(), null);
        Thread.sleep(1000); // see testForcedRefresh()
        ProfilesConfigFile profilesConfigFileAfterRefresh = new ProfilesConfigFile(getLocationForTestFile("BasicProfile2.tst"));
        ProfilesConfigFileWriter.dumpToFile(profilesFile, true, profilesConfigFileAfterRefresh.getAllProfiles().values().toArray(new Profile[1]));
        profileCredentialsProvider.setRefreshIntervalNanos(1l);
        AWSCredentials credentialsAfterRefresh = profileCredentialsProvider.getCredentials();
        Assert.assertEquals("credentialsAfterRefresh AWSAccessKeyId", "accessKey2", credentialsAfterRefresh.getAWSAccessKeyId());
        Assert.assertEquals("credentialsAfterRefresh AWSSecretKey", "secretAccessKey2", credentialsAfterRefresh.getAWSSecretKey());
    }

    /**
     * A role profile delegates to the ProfileCredentialsService with the
     * source profile's long-lived credentials, the configured role_arn, no
     * external id, and a generated "aws-sdk-java-" session name.
     */
    @Test
    public void testAssumeRole() throws Exception {
        ProfilesConfigFile profilesFile = new ProfilesConfigFile(getLocationForTestFile("ProfileWithRole.tst"), new ProfileCredentialsService() {
            @Override
            public AWSCredentialsProvider getAssumeRoleCredentialsProvider(RoleInfo targetRoleInfo) {
                AWSCredentials credentials = targetRoleInfo.getLongLivedCredentialsProvider().getCredentials();
                Assert.assertEquals("sourceProfile AWSAccessKeyId", "defaultAccessKey", credentials.getAWSAccessKeyId());
                Assert.assertEquals("sourceProfile AWSSecretKey", "defaultSecretAccessKey", credentials.getAWSSecretKey());
                Assert.assertEquals("role_arn", "arn:aws:iam::123456789012:role/testRole", targetRoleInfo.getRoleArn());
                Assert.assertNull("external_id", targetRoleInfo.getExternalId());
                Assert.assertTrue("role_session_name", targetRoleInfo.getRoleSessionName().startsWith("aws-sdk-java-"));
                return new StaticCredentialsProvider(new BasicAWSCredentials("sessionAccessKey", "sessionSecretKey"));
            }
        });
        ProfileCredentialsProvider profileCredentialsProvider = new ProfileCredentialsProvider(profilesFile, "test");
        AWSCredentials credentials = profileCredentialsProvider.getCredentials();
        Assert.assertEquals("sessionAccessKey", credentials.getAWSAccessKeyId());
        Assert.assertEquals("sessionSecretKey", credentials.getAWSSecretKey());
    }

    /** Role profile with explicit external_id and role_session_name. */
    @Test
    public void testAssumeRoleWithNameAndExternalId() throws Exception {
        ProfilesConfigFile profilesFile = new ProfilesConfigFile(getLocationForTestFile("ProfileWithRole2.tst"), new ProfileCredentialsService() {
            @Override
            public AWSCredentialsProvider getAssumeRoleCredentialsProvider(RoleInfo targetRoleInfo) {
                AWSCredentials credentials = targetRoleInfo.getLongLivedCredentialsProvider().getCredentials();
                Assert.assertEquals("sourceProfile AWSAccessKeyId", "defaultAccessKey", credentials.getAWSAccessKeyId());
                Assert.assertEquals("sourceProfile AWSSecretKey", "defaultSecretAccessKey", credentials.getAWSSecretKey());
                Assert.assertEquals("role_arn", "arn:aws:iam::123456789012:role/testRole", targetRoleInfo.getRoleArn());
                Assert.assertEquals("external_id", "testExternalId", targetRoleInfo.getExternalId());
                Assert.assertEquals("role_session_name", "testSessionName", targetRoleInfo.getRoleSessionName());
                return new StaticCredentialsProvider(new BasicAWSCredentials("sessionAccessKey", "sessionSecretKey"));
            }
        });
        ProfileCredentialsProvider profileCredentialsProvider = new ProfileCredentialsProvider(profilesFile, "test");
        AWSCredentials credentials = profileCredentialsProvider.getCredentials();
        Assert.assertEquals("sessionAccessKey", credentials.getAWSAccessKeyId());
        Assert.assertEquals("sessionSecretKey", credentials.getAWSSecretKey());
    }

    /** source_profile listed after role_arn in the file still resolves. */
    @Test
    public void testAssumeRoleWithSourceAfterRole() throws Exception {
        ProfilesConfigFile profilesFile = new ProfilesConfigFile(getLocationForTestFile("ProfileWithSourceAfterRole.tst"), new ProfileCredentialsService() {
            @Override
            public AWSCredentialsProvider getAssumeRoleCredentialsProvider(RoleInfo targetRoleInfo) {
                AWSCredentials credentials = targetRoleInfo.getLongLivedCredentialsProvider().getCredentials();
                Assert.assertEquals("sourceProfile AWSAccessKeyId", "defaultAccessKey", credentials.getAWSAccessKeyId());
                Assert.assertEquals("sourceProfile AWSSecretKey", "defaultSecretAccessKey", credentials.getAWSSecretKey());
                Assert.assertEquals("role_arn", "arn:aws:iam::123456789012:role/testRole", targetRoleInfo.getRoleArn());
                Assert.assertNull("external_id", targetRoleInfo.getExternalId());
                Assert.assertTrue("role_session_name", targetRoleInfo.getRoleSessionName().startsWith("aws-sdk-java-"));
                return new StaticCredentialsProvider(new BasicAWSCredentials("sessionAccessKey", "sessionSecretKey"));
            }
        });
        ProfileCredentialsProvider profileCredentialsProvider = new ProfileCredentialsProvider(profilesFile, "test");
        AWSCredentials credentials = profileCredentialsProvider.getCredentials();
        Assert.assertEquals("sessionAccessKey", credentials.getAWSAccessKeyId());
        Assert.assertEquals("sessionSecretKey", credentials.getAWSSecretKey());
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.editor;

import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.actionSystem.MouseShortcut;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.openapi.editor.impl.AbstractEditorTest;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.testFramework.EditorTestUtil;
import com.intellij.testFramework.fixtures.EditorMouseFixture;
import com.intellij.util.ThrowableRunnable;

import java.awt.datatransfer.StringSelection;
import java.awt.event.InputEvent;
import java.util.Arrays;

/**
 * Tests for multi-caret editor behavior: adding/removing carets with the
 * mouse, multi-caret typing and clipboard operations, rectangular selection,
 * and caret bookkeeping on folding/inlays. Fixtures encode editor state via
 * the &lt;caret&gt; and &lt;selection&gt; markers understood by
 * initText/checkResultByText.
 */
public class EditorMultiCaretTest extends AbstractEditorTest {
  private boolean myStoredVirtualSpaceSetting;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Virtual space changes caret placement past line ends; pin it off and
    // restore the user's value in tearDown().
    myStoredVirtualSpaceSetting = EditorSettingsExternalizable.getInstance().isVirtualSpace();
    EditorSettingsExternalizable.getInstance().setVirtualSpace(false);
  }

  @Override
  public void tearDown() throws Exception {
    try {
      EditorSettingsExternalizable.getInstance().setVirtualSpace(myStoredVirtualSpaceSetting);
    }
    catch (Throwable e) {
      addSuppressedException(e);
    }
    finally {
      super.tearDown();
    }
  }

  // Alt+Shift+click toggles carets; a plain click collapses to one caret.
  public void testCaretAddingAndRemoval() {
    initText("some <selection>t<caret>ext</selection>\n" +
             "another line");

    mouse().alt().shift().clickAt(1, 1); // alt-shift-click in a 'free space'
    checkResultByText("some <selection>t<caret>ext</selection>\n" +
                      "a<caret>nother line");

    mouse().alt().shift().clickAt(0, 8); // alt-shift-click in existing selection
    checkResultByText("some <selection>t<caret>ext</selection>\n" +
                      "a<caret>nother line");

    mouse().alt().shift().clickAt(0, 6); // alt-shift-click at existing caret with selection
    checkResultByText("some text\n" +
                      "a<caret>nother line");

    mouse().alt().shift().clickAt(1, 1); // alt-shift-click at the sole caret
    checkResultByText("some text\n" +
                      "a<caret>nother line");

    mouse().alt().shift().clickAt(0, 30); // alt-shift-click in virtual space
    checkResultByText("some text<caret>\n" +
                      "a<caret>nother line");

    mouse().clickAt(0, 0); // plain mouse click
    checkResultByText("<caret>some text\n" +
                      "another line");
  }

  // A custom Alt+click shortcut mapped to add/remove-caret also works.
  public void testCustomShortcut() throws Throwable {
    doWithAltClickShortcut(() -> {
      initText("<caret>text");
      mouse().alt().clickAt(0, 2);
      checkResultByText("<caret>te<caret>xt");
    });
  }

  // Removing a caret via the custom shortcut must not drop other selections.
  public void testCaretRemovalWithCustomShortcutDoesntAffectOtherSelections() throws Throwable {
    doWithAltClickShortcut(() -> {
      initText("<selection>some<caret></selection> text");
      mouse().alt().clickAt(0, 6);
      mouse().alt().clickAt(0, 6);
      checkResultByText("<selection>some<caret></selection> text");
    });
  }

  // Alt+drag creates a caret per line, growing selections as the drag widens.
  public void testAltDragStartingFromWithinLine() {
    initText("<caret>line\n" +
             "long line\n" +
             "very long line\n" +
             "long line\n" +
             "line");
    setEditorVisibleSize(1000, 1000);
    EditorMouseFixture mouse = mouse();
    mouse.alt().pressAt(1, 6);
    checkResultByText("line\n" +
                      "long l<caret>ine\n" +
                      "very long line\n" +
                      "long line\n" +
                      "line");
    mouse.dragTo(4, 6); // still holding Alt
    checkResultByText("line\n" +
                      "long l<caret>ine\n" +
                      "very l<caret>ong line\n" +
                      "long l<caret>ine\n" +
                      "line<caret>");
    mouse.dragTo(4, 8); // still holding Alt
    checkResultByText("line\n" +
                      "long l<selection>in<caret></selection>e\n" +
                      "very l<selection>on<caret></selection>g line\n" +
                      "long l<selection>in<caret></selection>e\n" +
                      "line");
    mouse.dragTo(4, 10).release(); // still holding Alt
    checkResultByText("line\n" +
                      "long l<selection>ine<caret></selection>\n" +
                      "very l<selection>ong <caret></selection>line\n" +
                      "long l<selection>ine<caret></selection>\n" +
                      "line");
  }

  // Middle-button drag also builds a rectangular (column) selection.
  public void testMiddleButtonDragStartingFromVirtualSpace() {
    initText("<caret>line\n" +
             "long line\n" +
             "very long line\n" +
             "long line\n" +
             "line");
    setEditorVisibleSize(1000, 1000);
    EditorMouseFixture mouse = mouse();
    mouse.middle().pressAt(1, 17);
    checkResultByText("line\n" +
                      "long line<caret>\n" +
                      "very long line\n" +
                      "long line\n" +
                      "line");
    mouse.dragTo(2, 16);
    checkResultByText("line\n" +
                      "long line<caret>\n" +
                      "very long line<caret>\n" +
                      "long line\n" +
                      "line");
    mouse.dragTo(3, 12);
    checkResultByText("line\n" +
                      "long line\n" +
                      "very long li<selection><caret>ne</selection>\n" +
                      "long line\n" +
                      "line");
    mouse.dragTo(3, 6).release();
    checkResultByText("line\n" +
                      "long l<selection><caret>ine</selection>\n" +
                      "very l<selection><caret>ong line</selection>\n" +
                      "long l<selection><caret>ine</selection>\n" +
                      "line");
  }

  // Pressing/releasing Alt mid-drag switches between linear and column modes.
  public void testAltOnOffWhileDragging() {
    initText("line1\n" +
             "line2\n" +
             "line3");
    setEditorVisibleSize(1000, 1000);
    EditorMouseFixture mouse = mouse();
    mouse.pressAt(0, 1).dragTo(1, 2);
    checkResultByText("l<selection>ine1\n" +
                      "li<caret></selection>ne2\n" +
                      "line3");
    mouse.alt().dragTo(1, 3);
    checkResultByText("l<selection>in<caret></selection>e1\n" +
                      "l<selection>in<caret></selection>e2\n" +
                      "line3");
    mouse.noModifiers().dragTo(2, 4).release();
    checkResultByText("l<selection>ine1\n" +
                      "line2\n" +
                      "line<caret></selection>3");
  }

  // Typing replaces each caret's selection and inserts at every caret.
  public void testTyping() {
    initText("some<caret> text<caret>\n" +
             "some <selection><caret>other</selection> <selection>text<caret></selection>\n" +
             "<selection>ano<caret>ther</selection> line");
    type('A');
    checkResultByText("someA<caret> textA<caret>\n" +
                      "some A<caret> A<caret>\n" +
                      "A<caret> line");
  }

  // Copy with N carets, paste with N carets: one clipboard segment per caret.
  public void testCopyPaste() {
    initText("<selection><caret>one</selection> two \n" +
             "<selection><caret>three</selection> four ");
    executeAction("EditorCopy");
    executeAction("EditorLineEnd");
    executeAction("EditorPaste");
    checkResultByText("one twoone<caret> \n" +
                      "three fourthree<caret> ");
  }

  public void testCutAndPaste() {
    initText("<selection>one<caret></selection> two \n" +
             "<selection>three<caret></selection> four ");
    executeAction("EditorCut");
    executeAction("EditorLineEnd");
    executeAction("EditorPaste");
    checkResultByText(" twoone<caret> \n" +
                      " fourthree<caret> ");
  }

  // A single copied item is duplicated into every caret on paste.
  public void testPasteSingleItem() {
    initText("<selection>one<caret></selection> two \n" +
             "three four ");
    executeAction("EditorCopy");
    executeAction("EditorCloneCaretBelow");
    executeAction("EditorLineEnd");
    executeAction("EditorPaste");
    checkResultByText("one twoone<caret> \n" +
                      "three fourone<caret> ");
  }

  public void testCutAndPasteMultiline() {
    initText("one <selection>two \n" +
             "three<caret></selection> four \n" +
             "five <selection>six \n" +
             "seven<caret></selection> eight");
    executeAction("EditorCut");
    executeAction("EditorLineEnd");
    executeAction("EditorPaste");
    checkResultByText("one fourtwo \n" +
                      "three<caret> \n" +
                      "five eightsix \n" +
                      "seven<caret>");
  }

  // Multiline content copied from one caret is pasted whole at each caret.
  public void testCopyMultilineFromOneCaretPasteIntoTwo() {
    initText("<selection>one\n" +
             "two<caret></selection>\n" +
             "three\n" +
             "four");
    executeAction("EditorCopy");
    executeAction("EditorTextStart");
    executeAction("EditorCloneCaretBelow");
    executeAction("EditorPaste");
    checkResultByText("one\n" +
                      "two<caret>one\n" +
                      "one\n" +
                      "two<caret>two\n" +
                      "three\n" +
                      "four");
  }

  // Mismatched caret/segment counts make multi-caret paste a no-op.
  public void testCopyPasteDoesNothingWithUnevenSelection() {
    initText("<selection>one\n" +
             "two<caret></selection>\n" +
             "<selection>three<caret></selection>\n" +
             "four");
    executeAction("EditorCopy");
    executeAction("EditorPaste");
    checkResultByText("one\n" +
                      "two<caret>\n" +
                      "three<caret>\n" +
                      "four");
  }

  // Copy with 4 carets, paste with 2: only the matching carets receive text.
  public void testPastingAtDifferentNumberOfCarets() {
    initText("<selection>one<caret></selection>\n" +
             "<selection>two<caret></selection>\n" +
             "<selection>three<caret></selection>\n" +
             "<selection>four<caret></selection>");
    copy();
    myEditor.getCaretModel().setCaretsAndSelections(Arrays.asList(new CaretState(new LogicalPosition(0, 0), new LogicalPosition(0, 0), new LogicalPosition(0, 0)),
                                                                  new CaretState(new LogicalPosition(1, 0), new LogicalPosition(1, 0), new LogicalPosition(1, 0))));
    paste();
    checkResultByText("oneone\n" +
                      "twotwo\n" +
                      "three\n" +
                      "four");
  }

  // Externally-set clipboard content ending in a line break pastes per caret.
  public void testPastingLineWithBreakFromOutside() {
    initText("<caret>\n" +
             "<caret>");
    CopyPasteManager.getInstance().setContents(new StringSelection("abc\n"));
    paste();
    checkResultByText("abc<caret>\n" +
                      "abc<caret>");
  }

  // Escape collapses a multi-caret drag back to the drag-origin caret.
  public void testEscapeAfterDragDown() {
    initText("line1\n" +
             "line2");
    setEditorVisibleSize(1000, 1000);
    mouse().alt().pressAt(0, 1).dragTo(1, 2).release();
    executeAction("EditorEscape");
    checkResultByText("li<caret>ne1\n" +
                      "line2");
  }

  public void testEscapeAfterDragUp() {
    initText("line1\n" +
             "line2");
    setEditorVisibleSize(1000, 1000);
    mouse().alt().pressAt(1, 1).dragTo(0, 2).release();
    executeAction("EditorEscape");
    checkResultByText("line1\n" +
                      "li<caret>ne2");
  }

  // Alt+Shift+double-click adds a caret with word selection.
  public void testAltShiftDoubleClick() {
    initText("q<caret>uick brown fox");
    mouse().alt().shift().doubleClickAt(0, 8);
    checkResultByText("q<caret>uick <selection>brown<caret></selection> fox");
  }

  public void testAltShiftDoubleClickAtExistingCaret() {
    initText("q<caret>uick br<caret>own fox");
    mouse().alt().shift().doubleClickAt(0, 8);
    checkResultByText("q<caret>uick brown fox");
  }

  // Alt+Shift+triple-click adds a caret with whole-line selection.
  public void testAltShiftTripleClick() {
    initText("q<caret>uick\n" +
             "brown\n" +
             "fox");
    mouse().alt().shift().tripleClickAt(1, 2);
    checkResultByText("q<caret>uick\n" +
                      "<selection>br<caret>own\n" +
                      "</selection>fox");
  }

  public void testAltShiftTripleClickAtExistingCaret() {
    initText("q<caret>uick\n" +
             "br<caret>own\n" +
             "fox");
    mouse().alt().shift().tripleClickAt(1, 2);
    checkResultByText("q<caret>uick\n" +
                      "brown\n" +
                      "fox");
  }

  // Caret offsets and soft-wrap positions stay consistent after deletion.
  public void testCaretPositionsRecalculationOnDocumentChange() {
    initText("\n" +
             "<selection><caret>word</selection>\n" +
             "some long prefix <selection><caret>word</selection>-suffix");
    EditorTestUtil.configureSoftWraps(myEditor, 17); // wrapping right before 'word-suffix'
    delete();
    checkResultByText("\n" +
                      "<caret>\n" +
                      "some long prefix <caret>-suffix");
    verifySoftWrapPositions(19);
  }

  // Alt+Shift+middle-click creates a rectangular selection from the caret.
  public void testCreateRectangularSelectionWithMouseClicks() {
    initText("<caret>line\n" +
             "long line\n" +
             "very long line\n" +
             "long line\n" +
             "line");
    mouse().alt().shift().middle().clickAt(2, 2);
    checkResultByText("<selection>li<caret></selection>ne\n" +
                      "<selection>lo<caret></selection>ng line\n" +
                      "<selection>ve<caret></selection>ry long line\n" +
                      "long line\n" +
                      "line");
  }

  public void testCreateRectangularSelectionExtendsSelection() {
    initText("<caret>line\n" +
             "long line\n" +
             "very long line\n" +
             "long line\n" +
             "line");
    mouse().alt().shift().middle().clickAt(1, 1);
    checkResultByText("<selection>l<caret></selection>ine\n" +
                      "<selection>l<caret></selection>ong line\n" +
                      "very long line\n" +
                      "long line\n" +
                      "line");
    mouse().alt().shift().middle().clickAt(2, 2);
    checkResultByText("<selection>li<caret></selection>ne\n" +
                      "<selection>lo<caret></selection>ng line\n" +
                      "<selection>ve<caret></selection>ry long line\n" +
                      "long line\n" +
                      "line");
  }

  // Alt+Shift+drag adds a second selection while keeping the first.
  public void testAddingMultipleSelectionsUsingMouse() {
    initText("s<selection>om<caret></selection>e text\nother text");
    setEditorVisibleSize(1000, 1000);
    mouse().alt().shift().pressAt(0, 5).dragTo(1, 2).release();
    checkResultByText("s<selection>om<caret></selection>e <selection>text\not<caret></selection>her text");
  }

  public void testAddingMultipleSelectionsUsingMouseInColumnSelectionMode() {
    initText("s<selection>om<caret></selection>e text\nother text");
    setEditorVisibleSize(1000, 1000);
    ((EditorEx)myEditor).setColumnMode(true);
    mouse().alt().shift().pressAt(0, 5).dragTo(1, 2).release();
    checkResultByText("s<selection>om<caret></selection>e <selection>text\not<caret></selection>her text");
  }

  // Dragging from a just-removed caret position must not resurrect it.
  public void testAltShiftDragAfterRemovingCaret() {
    initText("<selection>a<caret></selection>b<caret>racadabra");
    setEditorVisibleSize(1000, 1000);
    mouse().alt().shift().pressAt(0, 2).dragTo(0, 3).release();
    checkResultByText("<selection>a<caret></selection>bracadabra");
  }

  // Ctrl+Alt+Shift+drag adds a rectangular selection to existing carets.
  public void testAddingRectangualSelectionUsingMouse() {
    initText("s<selection>om<caret></selection>e text\nother text");
    setEditorVisibleSize(1000, 1000);
    mouse().ctrl().alt().shift().pressAt(0, 7).dragTo(1, 5).release();
    checkResultByText("s<selection>om<caret></selection>e <selection><caret>te</selection>xt\nother<selection><caret> t</selection>ext");
  }

  // Caret visual positions are recomputed when a fold region collapses.
  public void testCaretPositionUpdateOnFolding() {
    initText("line1\n" +
             "line2\n" +
             "l<caret>ine3\n" +
             "line<caret>4");
    addCollapsedFoldRegion(0, 6, "...");
    verifyCaretsAndSelections(1, 1, 1, 1,
                              2, 4, 4, 4);
  }

  // When carets merge after movement, the merged caret remains primary.
  public void testCaretStaysPrimaryOnMerging() {
    initText("word\n" +
             "<caret>word word\n" +
             "");
    myEditor.getCaretModel().addCaret(new VisualPosition(0, 0));
    myEditor.getCaretModel().addCaret(new VisualPosition(1, 5));
    assertEquals(new VisualPosition(1, 5), myEditor.getCaretModel().getPrimaryCaret().getVisualPosition());
    down();
    checkResultByText("word\n" +
                      "<caret>word word\n" +
                      "<caret>");
    assertEquals(new VisualPosition(2, 0), myEditor.getCaretModel().getPrimaryCaret().getVisualPosition());
  }

  /**
   * Temporarily maps plain Alt+click (button 1, single click) to the
   * add/remove-caret action in the active keymap, runs the task, and always
   * removes the shortcut again.
   */
  private static void doWithAltClickShortcut(ThrowableRunnable runnable) throws Throwable {
    Keymap keymap = KeymapManager.getInstance().getActiveKeymap();
    MouseShortcut shortcut = new MouseShortcut(1, InputEvent.ALT_DOWN_MASK, 1);
    try {
      keymap.addShortcut(IdeActions.ACTION_EDITOR_ADD_OR_REMOVE_CARET, shortcut);
      runnable.run();
    }
    finally {
      keymap.removeShortcut(IdeActions.ACTION_EDITOR_ADD_OR_REMOVE_CARET, shortcut);
    }
  }

  // Typing over adjacent per-caret selections must keep carets distinct.
  public void testTypingAdjacentSpaces() {
    initText("<caret>\t<caret>\t");
    rightWithSelection();
    type(' ');
    checkResultByText(" <caret> <caret>");
  }

  // Cloning a caret positioned before an inlay lands before the next inlay.
  public void testCloneCaretBeforeInlay() {
    initText("\n");
    addInlay(0);
    addInlay(1);
    mouse().clickAt(0, 0);
    executeAction("EditorCloneCaretBelow");
    verifyCaretsAndSelections(0, 0, 0, 0,
                              1, 0, 0, 0);
  }

  public void testCloneCaretAfterInlay() {
    initText("\n");
    addInlay(0);
    addInlay(1);
    mouse().clickAt(0, 1);
    executeAction("EditorCloneCaretBelow");
    verifyCaretsAndSelections(0, 1, 1, 1,
                              1, 1, 1, 1);
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2019 the original author or authors. */ package org.assertj.core.api.recursive.comparison; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; import static org.assertj.core.error.ShouldBeEqual.shouldBeEqual; import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; import static org.assertj.core.presentation.UnicodeRepresentation.UNICODE_REPRESENTATION; import static org.assertj.core.test.AlwaysEqualComparator.ALWAY_EQUALS_STRING; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.Lists.list; import static org.junit.jupiter.params.provider.Arguments.arguments; import static org.mockito.Mockito.verify; import java.sql.Timestamp; import java.util.Date; import java.util.stream.Stream; import org.assertj.core.api.RecursiveComparisonAssert; import org.assertj.core.api.RecursiveComparisonAssert_isEqualTo_BaseTest; import org.assertj.core.internal.objects.data.AlwaysEqualPerson; import org.assertj.core.internal.objects.data.FriendlyPerson; import org.assertj.core.internal.objects.data.Giant; import org.assertj.core.internal.objects.data.Human; import org.assertj.core.internal.objects.data.Person; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; public class 
RecursiveComparisonAssert_isEqualTo_Test extends RecursiveComparisonAssert_isEqualTo_BaseTest {

  // Two null roots: the recursive comparison treats null == null as equal.
  @Test
  public void should_pass_when_actual_and_expected_are_null() {
    // GIVEN
    Person actual = null;
    Person expected = null;
    // THEN
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // A null actual against a non-null expected must report "should not be null".
  @Test
  public void should_fail_when_actual_is_null_and_expected_is_not() {
    // GIVEN
    Person actual = null;
    Person expected = new Person();
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    verify(failures).failure(info, shouldNotBeNull());
  }

  // A non-null actual against a null expected must report a plain equality failure.
  @Test
  public void should_fail_when_actual_is_not_null_and_expected_is() {
    // GIVEN
    Person actual = new Person();
    Person expected = null;
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    verify(failures).failure(info, shouldBeEqual(actual, null, objects.getComparisonStrategy(), info.representation()));
  }

  // The description set via as(...) must survive the switch to recursive comparison
  // and show up in the failure message.
  @Test
  public void should_honor_test_description() {
    // GIVEN
    Person actual = new Person("John");
    actual.home.address.number = 1;
    Person expected = new Person("John");
    expected.home.address.number = 2;
    // WHEN
    AssertionError error = expectAssertionError(() -> assertThat(actual).as("test description")
                                                                        .usingRecursiveComparison()
                                                                        .isEqualTo(expected));
    // THEN
    assertThat(error).hasMessageContaining("[test description]");
  }

  // The representation configured before usingRecursiveComparison() must be carried over.
  @Test
  public void should_propagate_representation() {
    // GIVEN
    Person actual = new Person("John");
    Person expected = new Person("John");
    // WHEN
    RecursiveComparisonAssert<?> assertion = assertThat(actual).withRepresentation(UNICODE_REPRESENTATION)
                                                               .usingRecursiveComparison()
                                                               .isEqualTo(expected);
    // THEN
    assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
  }

  // An overridden error message set before the switch must be carried over too.
  @Test
  public void should_propagate_overridden_error_message() {
    // GIVEN
    Person actual = new Person("John");
    Person expected = new Person("John");
    String errorMessage = "boom";
    // WHEN
    RecursiveComparisonAssert<?> assertion = assertThat(actual).overridingErrorMessage(errorMessage)
                                                               .usingRecursiveComparison()
                                                               .isEqualTo(expected);
    // THEN
    assertThat(assertion.info.overridingErrorMessage()).isEqualTo(errorMessage);
  }

  // Comparators registered with usingComparatorForType must end up in the
  // recursive comparison configuration.
  @Test
  public void should_propagate_comparators_by_type() {
    // GIVEN
    Person actual = new Person("John");
    // WHEN
    RecursiveComparisonConfiguration assertion = assertThat(actual).usingComparatorForType(ALWAY_EQUALS_STRING, String.class)
                                                                   .usingRecursiveComparison()
                                                                   .getRecursiveComparisonConfiguration();
    // THEN
    assertThat(assertion.comparatorByTypes()).contains(entry(String.class, ALWAY_EQUALS_STRING));
  }

  // AlwaysEqualPerson presumably overrides equals to always return true; the
  // recursive comparison must still diff fields instead of trusting the roots'
  // equals implementation — TODO confirm against AlwaysEqualPerson's source.
  @Test
  public void should_not_use_equal_implementation_of_root_objects_to_compare() {
    // GIVEN
    AlwaysEqualPerson actual = new AlwaysEqualPerson();
    actual.name = "John";
    actual.home.address.number = 1;
    AlwaysEqualPerson expected = new AlwaysEqualPerson();
    expected.name = "John";
    expected.home.address.number = 2;
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    ComparisonDifference numberDifference = diff("home.address.number",
                                                 actual.home.address.number,
                                                 expected.home.address.number);
    verifyShouldBeEqualByComparingFieldByFieldRecursivelyCall(actual, expected, numberDifference);
  }

  // java.util.Date vs java.sql.Timestamp with the same epoch millis must compare equal.
  @Test
  public void should_treat_date_as_equal_to_timestamp() {
    // GIVEN
    Person actual = new Person("Fred");
    actual.dateOfBirth = new Date(1000L);
    Person expected = new Person("Fred");
    expected.dateOfBirth = new Timestamp(1000L);
    // THEN
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // '%' in a field value must not be interpreted as a format specifier when
  // building the failure report.
  @Test
  public void should_be_able_to_compare_objects_with_percentages() {
    // GIVEN
    Person actual = new Person("foo");
    Person expected = new Person("%foo");
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    ComparisonDifference nameDifference = diff("name", actual.name, expected.name);
    verifyShouldBeEqualByComparingFieldByFieldRecursivelyCall(actual, expected, nameDifference);
  }

  // Differences at several nesting depths must all be collected in one failure.
  @Test
  public void should_fail_when_fields_of_different_nesting_levels_differ() {
    // GIVEN
    Person actual = new Person("John");
    actual.home.address.number = 1;
    Person expected = new Person("Jack");
    expected.home.address.number = 2;
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    ComparisonDifference nameDifference = diff("name", actual.name, expected.name);
    ComparisonDifference numberDifference = diff("home.address.number",
                                                 actual.home.address.number,
                                                 expected.home.address.number);
    verifyShouldBeEqualByComparingFieldByFieldRecursivelyCall(actual, expected, numberDifference, nameDifference);
  }

  @SuppressWarnings("unused")
  @ParameterizedTest(name = "{2}: actual={0} / expected={1}")
  @MethodSource("recursivelyEqualObjects")
  public void should_pass_for_objects_with_the_same_data_when_using_the_default_recursive_comparison(Object actual,
                                                                                                     Object expected,
                                                                                                     String testDescription) {
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // Data provider for the parameterized test above: pairs with identical field
  // values, same and different declared types, in both argument orders.
  private static Stream<Arguments> recursivelyEqualObjects() {
    Person person1 = new Person("John");
    person1.home.address.number = 1;
    Person person2 = new Person("John");
    person2.home.address.number = 1;
    Person person3 = new Person("John");
    person3.home.address.number = 1;
    Human person4 = new Human();
    person4.name = "John";
    person4.home.address.number = 1;
    return Stream.of(arguments(person1, person2, "same data, same type"),
                     arguments(person2, person1, "same data, same type reversed"),
                     arguments(person3, person4, "same data, different type"),
                     arguments(person4, person3, "same data, different type"));
  }

  // A -> B -> A reference cycle must not cause infinite recursion.
  @Test
  public void should_be_able_to_compare_objects_with_direct_cycles() {
    // GIVEN
    Person actual = new Person("John");
    actual.home.address.number = 1;
    Person expected = new Person("John");
    expected.home.address.number = 1;
    // neighbour
    expected.neighbour = actual;
    actual.neighbour = expected;
    // THEN
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // Cycles routed through an ordered collection (friends list) must also terminate.
  @Test
  public void should_be_able_to_compare_objects_with_cycles_in_ordered_collection() {
    // GIVEN
    FriendlyPerson actual = new FriendlyPerson();
    actual.name = "John";
    actual.home.address.number = 1;
    FriendlyPerson expected = new FriendlyPerson();
    expected.name = "John";
    expected.home.address.number = 1;
    // neighbour
    expected.neighbour = actual;
    actual.neighbour = expected;
    // friends
    FriendlyPerson sherlock = new FriendlyPerson();
    sherlock.name = "Sherlock";
    sherlock.home.address.number = 221;
    actual.friends.add(sherlock);
    actual.friends.add(expected);
    expected.friends.add(sherlock);
    expected.friends.add(actual);
    // THEN
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // Mix of direct cycles, ordered-collection cycles and unordered-collection
  // cycles: exercises the visited-object tracking of the comparison engine.
  @Test
  public void should_be_able_to_compare_objects_with_cycles_in_ordered_and_unordered_collection() {
    // GIVEN
    FriendlyPerson actual = new FriendlyPerson();
    actual.name = "John";
    actual.home.address.number = 1;
    FriendlyPerson expected = new FriendlyPerson();
    expected.name = "John";
    expected.home.address.number = 1;
    // neighbour - direct cycle
    expected.neighbour = actual;
    actual.neighbour = expected;
    // friends cycle with intermediate collection
    FriendlyPerson sherlock = new FriendlyPerson();
    sherlock.name = "Sherlock";
    sherlock.home.address.number = 221;
    // ordered collections
    actual.friends.add(sherlock);
    actual.friends.add(expected);
    expected.friends.add(sherlock);
    expected.friends.add(actual);
    // unordered collections
    // this could cause an infinite recursion if we don't track correctly the visited objects
    actual.otherFriends.add(actual);
    actual.otherFriends.add(expected);
    actual.otherFriends.add(sherlock);
    expected.otherFriends.add(sherlock);
    expected.otherFriends.add(expected);
    expected.otherFriends.add(actual);
    // THEN
    assertThat(actual).usingRecursiveComparison()
                      .isEqualTo(expected);
  }

  // A difference buried inside a collection element must be reported with the
  // collection-rooted field path.
  @Test
  public void should_report_difference_in_collection() {
    // GIVEN
    FriendlyPerson actual = new FriendlyPerson();
    FriendlyPerson actualFriend = new FriendlyPerson();
    actualFriend.home.address.number = 99;
    actual.friends = list(actualFriend);
    FriendlyPerson expected = new FriendlyPerson();
    FriendlyPerson expectedFriend = new FriendlyPerson();
    expectedFriend.home.address.number = 10;
    expected.friends = list(expectedFriend);
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    ComparisonDifference friendNumberDifference = diff("friends.home.address.number", 99, 10);
    verifyShouldBeEqualByComparingFieldByFieldRecursivelyCall(actual, expected, friendNumberDifference);
  }

  // When expected's type lacks a field that actual's type declares, the failure
  // is a single root-level ("") difference with an explanatory message.
  @Test
  public void should_report_missing_property() {
    // GIVEN
    Giant actual = new Giant();
    actual.name = "joe";
    actual.height = 3.0;
    Human expected = new Human();
    expected.name = "joe";
    // WHEN
    compareRecursivelyFailsAsExpected(actual, expected);
    // THEN
    ComparisonDifference missingFieldDifference = diff("", actual, expected,
                                                       "org.assertj.core.internal.objects.data.Giant can't be compared to org.assertj.core.internal.objects.data.Human as Human does not declare all Giant fields, it lacks these: [height]");
    verifyShouldBeEqualByComparingFieldByFieldRecursivelyCall(actual, expected, missingFieldDifference);
  }
}
package org.insightech.er.editor.model.diagram_contents.element.node.table; import java.util.ArrayList; import java.util.List; import org.insightech.er.ResourceString; import org.insightech.er.db.DBManagerFactory; import org.insightech.er.editor.model.ObjectModel; import org.insightech.er.editor.model.diagram_contents.element.connection.ConnectionElement; import org.insightech.er.editor.model.diagram_contents.element.connection.Relation; import org.insightech.er.editor.model.diagram_contents.element.node.table.column.Column; import org.insightech.er.editor.model.diagram_contents.element.node.table.column.ColumnHolder; import org.insightech.er.editor.model.diagram_contents.element.node.table.column.NormalColumn; import org.insightech.er.editor.model.diagram_contents.element.node.table.index.CopyIndex; import org.insightech.er.editor.model.diagram_contents.element.node.table.index.Index; import org.insightech.er.editor.model.diagram_contents.element.node.table.properties.TableProperties; import org.insightech.er.editor.model.diagram_contents.element.node.table.properties.TablePropertiesHolder; import org.insightech.er.editor.model.diagram_contents.element.node.table.properties.TableViewProperties; import org.insightech.er.editor.model.diagram_contents.element.node.table.unique_key.ComplexUniqueKey; import org.insightech.er.editor.model.diagram_contents.element.node.table.unique_key.CopyComplexUniqueKey; public class ERTable extends TableView implements TablePropertiesHolder, ColumnHolder, ObjectModel { private static final long serialVersionUID = 11185865758118654L; public static final String NEW_PHYSICAL_NAME = ResourceString .getResourceString("new.table.physical.name"); public static final String NEW_LOGICAL_NAME = ResourceString .getResourceString("new.table.logical.name"); private String constraint; private String primaryKeyName; private String option; private List<Index> indexes; private List<ComplexUniqueKey> complexUniqueKeyList; public ERTable() { 
this.indexes = new ArrayList<Index>(); this.complexUniqueKeyList = new ArrayList<ComplexUniqueKey>(); } public NormalColumn getAutoIncrementColumn() { for (Column column : columns) { if (column instanceof NormalColumn) { NormalColumn normalColumn = (NormalColumn) column; if (normalColumn.isAutoIncrement()) { return normalColumn; } } } return null; } @Override public TableViewProperties getTableViewProperties() { this.tableViewProperties = DBManagerFactory.getDBManager( this.getDiagram()).createTableProperties( (TableProperties) this.tableViewProperties); return this.tableViewProperties; } public TableViewProperties getTableViewProperties(String database) { this.tableViewProperties = DBManagerFactory.getDBManager(database) .createTableProperties( (TableProperties) this.tableViewProperties); return this.tableViewProperties; } public void addIndex(Index index) { this.indexes.add(index); } @Override public ERTable copyData() { ERTable to = new ERTable(); to.setConstraint(this.getConstraint()); to.setPrimaryKeyName(this.getPrimaryKeyName()); to.setOption(this.getOption()); super.copyTableViewData(to); List<Index> indexes = new ArrayList<Index>(); for (Index fromIndex : this.getIndexes()) { indexes.add(new CopyIndex(to, fromIndex, to.getColumns())); } to.setIndexes(indexes); List<ComplexUniqueKey> complexUniqueKeyList = new ArrayList<ComplexUniqueKey>(); for (ComplexUniqueKey complexUniqueKey : this.getComplexUniqueKeyList()) { complexUniqueKeyList.add(new CopyComplexUniqueKey(complexUniqueKey, to.getColumns())); } to.complexUniqueKeyList = complexUniqueKeyList; to.tableViewProperties = (TableProperties) this .getTableViewProperties().clone(); return to; } @Override public void restructureData(TableView to) { ERTable table = (ERTable) to; table.setConstraint(this.getConstraint()); table.setPrimaryKeyName(this.getPrimaryKeyName()); table.setOption(this.getOption()); super.restructureData(to); List<Index> indexes = new ArrayList<Index>(); for (Index fromIndex : 
this.getIndexes()) { CopyIndex copyIndex = (CopyIndex) fromIndex; Index restructuredIndex = copyIndex.getRestructuredIndex(table); indexes.add(restructuredIndex); } table.setIndexes(indexes); List<ComplexUniqueKey> complexUniqueKeyList = new ArrayList<ComplexUniqueKey>(); for (ComplexUniqueKey complexUniqueKey : this.getComplexUniqueKeyList()) { CopyComplexUniqueKey copyComplexUniqueKey = (CopyComplexUniqueKey) complexUniqueKey; if (!copyComplexUniqueKey.isRemoved(this.getNormalColumns())) { ComplexUniqueKey restructuredComplexUniqueKey = copyComplexUniqueKey .restructure(); complexUniqueKeyList.add(restructuredComplexUniqueKey); } } table.complexUniqueKeyList = complexUniqueKeyList; table.tableViewProperties = (TableProperties) this.tableViewProperties .clone(); } public int getPrimaryKeySize() { int count = 0; for (Column column : this.columns) { if (column instanceof NormalColumn) { NormalColumn normalColumn = (NormalColumn) column; if (normalColumn.isPrimaryKey()) { count++; } } } return count; } public List<NormalColumn> getPrimaryKeys() { List<NormalColumn> primaryKeys = new ArrayList<NormalColumn>(); for (Column column : this.columns) { if (column instanceof NormalColumn) { NormalColumn normalColumn = (NormalColumn) column; if (normalColumn.isPrimaryKey()) { primaryKeys.add(normalColumn); } } } return primaryKeys; } public boolean isReferable() { if (this.getPrimaryKeySize() > 0) { return true; } if (this.complexUniqueKeyList.size() > 0) { return true; } for (Column column : this.columns) { if (column instanceof NormalColumn) { NormalColumn normalColumn = (NormalColumn) column; if (normalColumn.isUniqueKey()) { return true; } } } return false; } public Index getIndex(int index) { return this.indexes.get(index); } public void removeIndex(int index) { this.indexes.remove(index); } public List<Index> getIndexes() { return indexes; } public void setIndexes(List<Index> indexes) { this.indexes = indexes; } public void setComplexUniqueKeyList( 
List<ComplexUniqueKey> complexUniqueKeyList) { this.complexUniqueKeyList = complexUniqueKeyList; } public List<ComplexUniqueKey> getComplexUniqueKeyList() { return complexUniqueKeyList; } public void setTableViewProperties(TableProperties tableProperties) { this.tableViewProperties = tableProperties; } public List<Relation> getSelfRelations() { List<Relation> relations = new ArrayList<Relation>(); for (ConnectionElement connection : this.getOutgoings()) { if (connection instanceof Relation) { if (connection.getSource() == connection.getTarget()) { relations.add((Relation) connection); } } } return relations; } @Override public ERTable clone() { ERTable clone = (ERTable) super.clone(); TableProperties cloneTableProperties = (TableProperties) this .getTableViewProperties().clone(); clone.tableViewProperties = cloneTableProperties; return clone; } public String getConstraint() { return constraint; } public void setConstraint(String constraint) { this.constraint = constraint; } public String getPrimaryKeyName() { return primaryKeyName; } public void setPrimaryKeyName(String primaryKeyName) { this.primaryKeyName = primaryKeyName; } public String getOption() { return option; } public void setOption(String option) { this.option = option; } public static boolean isRecursive(TableView source, TableView target) { for (Relation relation : source.getIncomingRelations()) { TableView temp = relation.getSourceTableView(); if (temp.equals(source)) { continue; } if (temp.equals(target)) { return true; } if (isRecursive(temp, target)) { return true; } } return false; } public Relation createRelation() { boolean referenceForPK = false; ComplexUniqueKey referencedComplexUniqueKey = null; NormalColumn referencedColumn = null; boolean notNull = false; if (this.getPrimaryKeySize() > 0) { referenceForPK = true; notNull = true; } else if (this.getComplexUniqueKeyList().size() > 0) { referencedComplexUniqueKey = this.getComplexUniqueKeyList().get(0); notNull = 
referencedComplexUniqueKey.getColumnList().get(0) .isNotNull(); } else { for (NormalColumn normalColumn : this.getNormalColumns()) { if (normalColumn.isUniqueKey()) { referencedColumn = normalColumn; notNull = referencedColumn.isNotNull(); break; } } } return new Relation(referenceForPK, referencedComplexUniqueKey, referencedColumn, notNull, false); } public String getObjectType() { return "table"; } @Override public String toString() { return "name:" + this.getName() + ", " + super.toString(); } }
package org.square.qa.utilities.constructs;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.jblas.DoubleMatrix;
import org.square.qa.utilities.fileParsers.FileParserJStrings;

/**
 * Static utilities for the SQUARE crowdsourcing benchmark: Beta-parameter
 * conversion, metric computation over jblas matrices, and n-fold
 * train/tune-set construction, printing and reloading. All n-fold state is
 * kept in the static {@link nFoldSet} holder.
 */
@SuppressWarnings("unused")
public class GeneralUtils {
	private static Logger log = LogManager.getLogger(GeneralUtils.class);
	private static GeneralUtilsParameterized<String, String, String> paramUtils = new GeneralUtilsParameterized<String, String, String>();

	/**
	 * Global (static) holder for all n-fold evaluation state. Populated by
	 * {@link #setNFoldSets(int)} / {@link #loadNFoldSet(File)} and consumed
	 * by {@link #printAll(File)}.
	 */
	public static class nFoldSet{
		public static boolean hasGT = false; // true once ground truth was loaded
		public static boolean useNFold = false; // true when n-fold files are in use
		public static Map<String,workersDataStruct<String,String> > workersMap = null; // worker id -> responses
		public static Map<String,String> gold = null; // supervised (gold) labels
		public static Map<String,String> gt = null; // ground-truth labels
		public static TreeSet<String> responseCategories=null;
		public static List<Map<String,workersDataStruct<String,String> > > workerMaps = null; // per-fold eval maps
		public static List<Map<String,workersDataStruct<String,String> > > workerMapsTune = null; // per-fold tune maps
		public static List<Map<String,workersDataStruct<String,String> > > workerMapsTrainTune = null; // full train+tune map
		public static List<Map<String,String> > goldSet = null;
		public static List<Map<String,String> > gtSet = null; // per-fold eval ground truth
		public static List<Map<String,String> > gtSetTune = null; // per-fold tune ground truth
		public static List<Map<String,Double> > classPriorSet = null;
		public static Map<String,Integer> workerToInt = null; // worker id -> integer id
		public static Map<String,Integer> questionToInt = null; // question id -> integer id
		public static Map<String,Integer> categToInt = null; // category -> integer id
		public static List<Set<String> > foldRetain = null; // per-fold questions to keep
		public static List<Set<String> > foldDiscard = null;} // per-fold questions to drop

	/**
	 * Get parameterized utils object
	 * @return GeneralUtilsParameterized object with worker ids, questions and responses instantiated as strings
	 */
	public static GeneralUtilsParameterized<String, String, String> getParamUtils(){
		return paramUtils;}

	/**
	 * Get the parameters of a Beta distribution for input mean and variance
	 * @param mean is double holding mean of the desired Beta distribution
	 * @param variance is a double holding the variance of the desired Beta distribution
	 * @return a Pair of Doubles holding a and b parameters of the corresponding Beta distribution
	 */
	public static Pair<Double, Double> getBetaParam(double mean, double variance){
		double a,b;
		double meanCube = mean*mean*mean;
		double varianceSq = variance*variance;
		// NOTE(review): this does not match the textbook moment-matching formula
		// a = mean*(mean*(1-mean)/variance - 1); confirm the intended parameterization.
		a = (meanCube + meanCube/mean - mean*(varianceSq))/varianceSq;
		b = (a*(1.0d - mean))/mean;
		return (new Pair<Double,Double>(a,b));}

	/**
	 * Extract questions from workers map
	 * @param workersMap is a Map from workers (String) to an object holding worker questions and responses (Strings)
	 * @return a Set of questions (String) — union over all workers
	 */
	public static Set<String> getQuestions(Map<String,workersDataStruct<String,String> > workersMap){
		Set<String> questions = new HashSet<String>();
		for(String key:workersMap.keySet()){
			questions.addAll(workersMap.get(key).getWorkerResponses().keySet());}
		return questions;}

	/**
	 * Compute per-category precision/recall/accuracy/F-measure into {@code metrics}.
	 * @param resultVector estimated labels, one entry per question
	 * @param metrics is a Metrics object to hold computed metrics
	 * @param groundTruth is a Pair of DoubleMatrix and DoubleMatrix holding questions and responses
	 * @param gold is the supervised/semi-supervised data to be excluded from metrics computation (at most one)
	 */
	@SafeVarargs
	public static void computeMetrics(DoubleMatrix resultVector,Metrics metrics,Pair<DoubleMatrix,DoubleMatrix> groundTruth, Pair<DoubleMatrix,DoubleMatrix>... gold){
		assert gold.length<=1:"Only one additional parameter accepted";
		boolean usedGold = false;
		if(gold.length == 1)
			usedGold = true;
		// mask results down to the questions that have ground truth
		DoubleMatrix gtRelavent = resultVector.mul(groundTruth.getSecond());
		DoubleMatrix gt = groundTruth.getFirst();
		if(usedGold){
			// zero out questions covered by gold so they are excluded from metrics
			gtRelavent.muli(gold[0].getSecond().eq(0.0d));
			gt = gt.mul(gold[0].getSecond().eq(0.0d));}
		int index = 0;
//		double accNum = 0;
//		double accDen = 0;
		for(int categ:metrics.getCategoriesList()){
			// one-vs-rest indicator matrices for the current category
			DoubleMatrix tempR = gtRelavent.eq((double)categ);
			DoubleMatrix tempGT = gt.eq((double)categ);
			double allCorrect = tempGT.get(tempGT.findIndices()).length;
			DoubleMatrix tempRTP = tempR.mul(tempGT); // true positives
			DoubleMatrix tempRFP = tempR.mul(tempGT.eq(0.0d)); // false positives
			DoubleMatrix tempRTN = tempR.eq(0.0d).mul(gt).mul(tempGT.eq(0.0d).mul(gt)); // true negatives (restricted to labeled questions)
			double tp = tempRTP.get(tempRTP.findIndices()).length;
			double fp = tempRFP.get(tempRFP.findIndices()).length;
			double tn = tempRTN.get(tempRTN.findIndices()).length;
			double precision = tp/(tp+fp);
			double recall = tp/allCorrect;
			double accuracy = (tp+tn)/(double)gt.get(gt.findIndices()).length;
			double fMeasure = 2.0d * ((precision*recall)/(precision+recall));
			metrics.getPrecision().put(index, precision);
			metrics.getRecall().put(index,recall);
			metrics.getfMeasure().put(index,fMeasure);
			metrics.getAccuracy().put(index, accuracy);
			index++;}
//		accNum = gtRelavent.get(gtRelavent.eq(gt).mul(gt).findIndices()).length;
//		accDen = gt.get(gt.findIndices()).length;
//		double accuracy = accNum/accDen;
//		metrics.setAccuracy(DoubleMatrix.ones(index).mul(accuracy));
		// macro-averages over categories
		metrics.setAvgAccuracy(metrics.getAccuracy().mean());
		metrics.setAvgPrecision(metrics.getPrecision().mean());
		metrics.setAvgRecall(metrics.getRecall().mean());
		metrics.setAvgFMeasure(metrics.getfMeasure().mean());}

	/**
	 * Update NFold class with mapping from workers, questions and categories to Integer
	 * @param responseCategories is a Set of String holding response categories
	 */
	public static void fillNFoldClass(Set<String> responseCategories){
		nFoldSet.workerToInt = paramUtils.getWorkerIntMap(nFoldSet.workersMap);
		nFoldSet.questionToInt = paramUtils.getQuestionIntMap(nFoldSet.workersMap);
		nFoldSet.categToInt = paramUtils.getCategIntMap(responseCategories);}

	/**
	 * Compute n folds from data 5 fold -> 20% => n = 5 80% => n = -5
	 * Negative n swaps the roles of the train and tune splits.
	 * @param n is an int (positive till 50% use symmetrically negative for higher folds)
	 */
	public static void setNFoldSets(int n){
		List<String> questions = new ArrayList<String>();
		nFoldSet.workerMaps = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.gtSet = new ArrayList<Map<String,String> >();
		nFoldSet.gtSetTune = new ArrayList<Map<String,String> >();
		nFoldSet.classPriorSet = new ArrayList<Map<String,Double> >();
		nFoldSet.workerMapsTune = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.workerMapsTrainTune = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.foldRetain = new ArrayList<Set<String> >();
		nFoldSet.foldDiscard = new ArrayList<Set<String> >();
		questions.addAll(getQuestions(nFoldSet.workersMap));
//		Collections.shuffle(questions);
		List<String> trainQuestions = questions; //Disregarding a test split.
		boolean reverse = false;
		if(n<0){
			// negative n: swap train/tune roles below
			reverse = true;
			n = n*(-1);}
		double splits = 1.0d/(double)n;
		int initIdx = 0;
		int splitIdx = 0;
		// fold 0 of workerMapsTrainTune always holds the full (train+tune) map
		nFoldSet.workerMapsTrainTune.add(paramUtils.getFilteredWorkerMap(nFoldSet.workersMap, trainQuestions));
		for(int i = 1;i<=n;i++){
			// [initIdx, splitIdx) is the i-th contiguous slice of the question list
			initIdx = (int)Math.floor((i-1)*splits*(trainQuestions.size()));
			splitIdx = (int)Math.floor((i*splits*(trainQuestions.size())));
			List<String> tuneSet = new ArrayList<String>();
			tuneSet.addAll(trainQuestions.subList(initIdx, splitIdx));
			List<String> trainSet = new ArrayList<String>();
			if(initIdx!=0){
				trainSet.addAll(trainQuestions.subList(0, initIdx));}
			if(splitIdx!=trainQuestions.size()){
				trainSet.addAll(trainQuestions.subList(splitIdx, trainQuestions.size()));}
			if(!reverse){
				nFoldSet.workerMaps.add(paramUtils.getFilteredWorkerMap(nFoldSet.workersMap, trainSet));
				nFoldSet.workerMapsTune.add(paramUtils.getFilteredWorkerMap(nFoldSet.workersMap, tuneSet));
				Map<String, String> tempTune = paramUtils.getFilteredGT(nFoldSet.gt, tuneSet);
				Map<String, String> tempTrain = paramUtils.getFilteredGT(nFoldSet.gt, trainSet);
				nFoldSet.gtSetTune.add(tempTune);
				nFoldSet.foldRetain.add(tempTune.keySet());
				nFoldSet.foldDiscard.add(tempTrain.keySet());
				nFoldSet.gtSet.add(tempTrain);
			}else{
				// reversed: the small slice becomes the eval set, the rest becomes tune
				nFoldSet.workerMaps.add(paramUtils.getFilteredWorkerMap(nFoldSet.workersMap, tuneSet));
				nFoldSet.workerMapsTune.add(paramUtils.getFilteredWorkerMap(nFoldSet.workersMap, trainSet));
				Map<String, String> tempTune = paramUtils.getFilteredGT(nFoldSet.gt, trainSet);
				Map<String, String> tempTrain = paramUtils.getFilteredGT(nFoldSet.gt, tuneSet);
				nFoldSet.gtSetTune.add(tempTune);
				nFoldSet.foldRetain.add(tempTrain.keySet());
				nFoldSet.foldDiscard.add(tempTune.keySet());
				nFoldSet.gtSet.add(tempTrain);}
		}
		if(!reverse){
			// NOTE(review): rotates retain/discard by one fold in the forward case
			// only — presumably to offset tune folds against eval folds; confirm.
			Collections.rotate(nFoldSet.foldRetain, 1);
			Collections.rotate(nFoldSet.foldDiscard, 1);}
	}

	/**
	 * Print NFold data to files
	 * @param dir is a File holding the print path for files
	 * @throws IOException
	 */
	public static void printAll(File dir) throws IOException{
		if(!dir.isDirectory())
			dir.mkdirs();
		String absPath;
		if(!dir.isAbsolute())
			absPath = dir.getAbsolutePath();
		else
			absPath = dir.getPath();
		if (GeneralUtils.nFoldSet.useNFold){
			// full train+tune responses and ground truth, then one file set per fold;
			// file names are re-parsed by loadNFoldSet, so keep them in sync
			paramUtils.printNumberedResponses(GeneralUtils.nFoldSet.workerMapsTrainTune.get(0),new File(absPath+"/responses_nFold_tuneEval_"+String.format("%03d",1) + ".txt"));
			paramUtils.printNumberedGT(GeneralUtils.nFoldSet.gt,new File(absPath+"/gt_nFold_tuneEval_"+String.format("%03d",1) + ".txt"));
			for(int i = 0;i<GeneralUtils.nFoldSet.workerMaps.size();i++){
				paramUtils.printNumberedResponses(GeneralUtils.nFoldSet.workerMaps.get(i),new File(absPath+"/responses_nFold_eval_"+String.format("%03d", i+1) + ".txt"));
				paramUtils.printNumberedResponses(GeneralUtils.nFoldSet.workerMapsTune.get(i),new File(absPath+"/responses_nFold_tune_"+String.format("%03d", i+1) + ".txt"));
				paramUtils.printNumberedGT(GeneralUtils.nFoldSet.gtSet.get(i),new File(absPath+"/gt_nFold_eval_"+String.format("%03d", i+1) + ".txt"));
				paramUtils.printNumberedGT(GeneralUtils.nFoldSet.gtSetTune.get(i),new File(absPath+"/gt_nFold_tune_"+String.format("%03d", i+1) + ".txt"));
				paramUtils.printNumberedQuestions(GeneralUtils.nFoldSet.foldRetain.get(i), new File(absPath+"/fold_retain_"+String.format("%03d", i+1) + ".txt"));
				paramUtils.printNumberedQuestions(GeneralUtils.nFoldSet.foldDiscard.get(i), new File(absPath+"/fold_discard_"+String.format("%03d", i+1) + ".txt"));}
		}else{
			paramUtils.printNumberedResponses(GeneralUtils.nFoldSet.workersMap,new File(absPath+"/responses_eval.txt"));
			if(GeneralUtils.nFoldSet.hasGT)
				paramUtils.printNumberedGT(GeneralUtils.nFoldSet.gt,new File(absPath+"/responses_gt.txt"));}
		// id-mapping files go to a "model" subdirectory
		File paramDir = new File(absPath+"/model");
		if(!paramDir.isDirectory())
			paramDir.mkdir();
		paramUtils.printMapFileWI(GeneralUtils.nFoldSet.workerToInt, new File(paramDir.getAbsolutePath()+"/map_worker_integer.txt"));
		paramUtils.printMapFileQI(GeneralUtils.nFoldSet.questionToInt, new File(paramDir.getAbsolutePath()+"/map_question_integer.txt"));
		paramUtils.printMapFileRI(GeneralUtils.nFoldSet.categToInt, new File(paramDir.getAbsolutePath()+"/map_category_integer.txt"));}

	/**
	 * Print data statistics
	 * @param workersMap workersMap is a Map from workers to an object holding questions and responses
	 * @param outDir outDir of type File is the output directory to print files (optional, at most one)
	 * @throws FileNotFoundException
	 */
	public static void printStatistics(Map<String,workersDataStruct<String,String> > workersMap, File... outDir) throws FileNotFoundException{
		assert outDir.length<2:"Only one output directory path accepted";
		if(outDir.length==1)
			paramUtils.printStatistics(nFoldSet.questionToInt,nFoldSet.workerToInt,workersMap,outDir[0]);
		else
			paramUtils.printStatistics(nFoldSet.questionToInt,nFoldSet.workerToInt,workersMap);}

	/**
	 * Load n fold sets from files previously written by {@link #printAll(File)}.
	 * File roles are recovered from the file-name segments between underscores
	 * (eval / tune / tuneEval / gt / retain / discard) and the trailing fold number.
	 * @param loadDir is a File holding path of the n fold files
	 * @throws IOException
	 */
	public static void loadNFoldSet(File loadDir) throws IOException{
		File files[] = loadDir.listFiles();
		nFoldSet.workerMaps = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.workerMapsTune = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.workerMapsTrainTune = new ArrayList<Map<String,workersDataStruct<String,String> > >();
		nFoldSet.gtSet = new ArrayList<Map<String,String> >();
		nFoldSet.gtSetTune = new ArrayList<Map<String,String> >();
		nFoldSet.responseCategories = new TreeSet<String>();
		nFoldSet.foldRetain = new ArrayList<Set<String> >();
		nFoldSet.foldDiscard = new ArrayList<Set<String> >();
		nFoldSet.categToInt = new HashMap<String, Integer>();
		nFoldSet.questionToInt = new HashMap<String, Integer>();
		nFoldSet.workerToInt = new HashMap<String, Integer>();
		//!!fix reading in map files and add support for fold retain and fold discard
		// sorted temp maps keyed by fold number so folds are appended in order
		SortedMap<Integer,Map<String,workersDataStruct<String,String> > > tempWorkerMaps = new TreeMap<Integer,Map<String,workersDataStruct<String,String> > >();
		SortedMap<Integer,Map<String,workersDataStruct<String,String> > > tempWorkerMapsTune = new TreeMap<Integer,Map<String,workersDataStruct<String,String> > >();
		SortedMap<Integer,Map<String,workersDataStruct<String,String> > > tempWorkerMapsTrainTune = new TreeMap<Integer,Map<String,workersDataStruct<String,String> > >();
		SortedMap<Integer,Map<String,String> > tempGTSet = new TreeMap<Integer,Map<String,String> >();
		SortedMap<Integer,Map<String,String> > tempGTSetTune = new TreeMap<Integer,Map<String,String> >();
		SortedMap<Integer,Set<String> > tempFoldRetain = new TreeMap<Integer,Set<String> >();
		SortedMap<Integer,Set<String> > tempFoldDiscard = new TreeMap<Integer,Set<String> >();
		FileParserJStrings fParser = new FileParserJStrings();
		for(File file:files){
			if(file.isDirectory()){
				if(file.getName().equals("model")) {
					// id-mapping files written by printAll
					File modelFiles[] = file.listFiles();
					for(File modelFile:modelFiles) {
						String modelFileName = modelFile.getName();
						if(modelFileName.equalsIgnoreCase("map_category_integer.txt")) {
							fParser.setFileName(modelFile.getAbsolutePath());
							Map<String,String> categToInt = fParser.parseGoldStandard();
							for(String oldName:categToInt.keySet()) {
								nFoldSet.responseCategories.add(categToInt.get(oldName));
								// NOTE(review): key and value both come from the map's
								// value; oldName is ignored. Looks like it should be
								// put(oldName, parseInt(value)) — the author's //!!fix
								// comment above suggests this is known-broken; confirm.
								nFoldSet.categToInt.put(categToInt.get(oldName), Integer.parseInt(categToInt.get(oldName)));
							}
						}
						if(modelFileName.equalsIgnoreCase("map_question_integer.txt")) {
							fParser.setFileName(modelFile.getAbsolutePath());
							Map<String,String> questionToInt = fParser.parseGoldStandard();
							for(String oldName:questionToInt.keySet()) {
								// NOTE(review): same key/value concern as above.
								nFoldSet.questionToInt.put(questionToInt.get(oldName), Integer.parseInt(questionToInt.get(oldName)));
							}
						}
						if(modelFileName.equalsIgnoreCase("map_worker_integer.txt")) {
							fParser.setFileName(modelFile.getAbsolutePath());
							Map<String,String> workerToInt = fParser.parseGoldStandard();
							for(String oldName:workerToInt.keySet()) {
								// NOTE(review): same key/value concern as above.
								nFoldSet.workerToInt.put(workerToInt.get(oldName), Integer.parseInt(workerToInt.get(oldName)));
							}
						}
					}
				} else {
					continue;}}
			String filename = file.getName();
			String[] parts = filename.split("\\.");
			if(parts.length <= 1)
				continue;
			if(parts[1].equalsIgnoreCase("txt")){
				String[] innerParts = parts[0].split("_");
				if(innerParts[innerParts.length-1].equalsIgnoreCase("eval")){ //unsupervised may or may not have gold available
					// e.g. "responses_eval.txt": the un-folded response file
					fParser.coloumnSwitch = true;
					fParser.setFileName(file.getAbsolutePath());
					nFoldSet.workersMap = fParser.parseWorkerLabels();
					fParser.coloumnSwitch = false;
				} else if(innerParts[innerParts.length-1].equalsIgnoreCase("gt")){ //unsupervised gt
					nFoldSet.hasGT = true;
					fParser.setFileName(file.getAbsolutePath());
					nFoldSet.gt = fParser.parseGoldStandard();
				}else if(innerParts[innerParts.length-2].equalsIgnoreCase("eval")){
					// per-fold eval files: "..._eval_NNN.txt"
					Integer idx = Integer.valueOf(innerParts[innerParts.length-1]);
					if(innerParts[0].equalsIgnoreCase("responses")){
						fParser.coloumnSwitch = true;
						fParser.setFileName(file.getAbsolutePath());
						tempWorkerMaps.put(idx,fParser.parseWorkerLabels());
						fParser.coloumnSwitch = false;
					} else if(innerParts[0].equalsIgnoreCase("gt")){
						fParser.setFileName(file.getAbsolutePath());
						tempGTSet.put(idx,fParser.parseGoldStandard());}
				}else if(innerParts[innerParts.length-2].equals("tune")){
					// per-fold tune files: "..._tune_NNN.txt"
					Integer idx = Integer.valueOf(innerParts[innerParts.length-1]);
					if(innerParts[0].equalsIgnoreCase("responses")){
						fParser.coloumnSwitch = true;
						fParser.setFileName(file.getAbsolutePath());
						tempWorkerMapsTune.put(idx,fParser.parseWorkerLabels());
						fParser.coloumnSwitch = false;
					} else if(innerParts[0].equalsIgnoreCase("gt")){
						fParser.setFileName(file.getAbsolutePath());
						tempGTSetTune.put(idx,fParser.parseGoldStandard());}
				}else if(innerParts[innerParts.length-2].equals("tuneEval")){
					// combined train+tune files: "..._tuneEval_NNN.txt"
					Integer idx = Integer.valueOf(innerParts[innerParts.length-1]);
					if(innerParts[0].equalsIgnoreCase("responses")){
						nFoldSet.useNFold = true;
						fParser.coloumnSwitch = true;
						fParser.setFileName(file.getAbsolutePath());
						nFoldSet.workerMapsTrainTune.add(fParser.parseWorkerLabels());
						nFoldSet.workersMap = nFoldSet.workerMapsTrainTune.get(0);
						fParser.coloumnSwitch = false;
					}else if(innerParts[0].equalsIgnoreCase("gt")){
						nFoldSet.hasGT = true;
						fParser.setFileName(file.getAbsolutePath());
						nFoldSet.gt = fParser.parseGoldStandard();
					}
				} else if(innerParts[innerParts.length-2].equals("retain")){ //fold retain
					Integer idx = Integer.valueOf(innerParts[innerParts.length-1]);
					fParser.setFileName(file.getAbsolutePath());
					tempFoldRetain.put(idx, fParser.parseFoldRR());
				} else if(innerParts[innerParts.length-2].equals("discard")){ //fold discard
					Integer idx = Integer.valueOf(innerParts[innerParts.length-1]);
					fParser.setFileName(file.getAbsolutePath());
					tempFoldDiscard.put(idx, fParser.parseFoldRR());
				}}}
		// flush fold-ordered temp maps into the public lists, keyed by the gt folds
		for(int idx:tempGTSet.keySet()){
			nFoldSet.workerMaps.add(tempWorkerMaps.get(idx));
			nFoldSet.workerMapsTune.add(tempWorkerMapsTune.get(idx));
			nFoldSet.gtSet.add(tempGTSet.get(idx));
			nFoldSet.gtSetTune.add(tempGTSetTune.get(idx));
			nFoldSet.foldRetain.add(tempFoldRetain.get(idx));
			nFoldSet.foldDiscard.add(tempFoldDiscard.get(idx));}}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import javax.jms.Connection; import javax.jms.JMSException; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Queue; import javax.jms.Session; import javax.jms.TextMessage; import javax.jms.Topic; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; import org.junit.Test; public class ConsumerSemanticsIT extends BaseKafkaJMSIT { @Test public void testQueueEnsureMessageSentBeforeSubscribeIsConsumed() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Queue destination = session.createQueue("testQueueEnsureMessageSentBeforeSubscribeIsConsumed"); MessageProducer messageProducer = session.createProducer(destination); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); MessageConsumer messageConsumer = session.createConsumer(destination); result = (TextMessage)messageConsumer.receive(1000); } } 
assertEquals(text, result == null ? null : result.getText()); } @Test public void testTopicEnsureMessageSentBeforeSubscribeIsNotConsumed() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Topic destination = session.createTopic("testTopicEnsureMessageSentBeforeSubscribeIsNotConsumed"); MessageProducer messageProducer = session.createProducer(destination); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); MessageConsumer messageConsumer = session.createConsumer(destination); result = (TextMessage)messageConsumer.receive(1000); } } assertNull(result); } @Test public void testTopicEnsureMessageSentAfterSubscribeIsConsumed() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Topic destination = session.createTopic("testTopicEnsureMessageSentAfterSubscribeIsConsumed"); MessageConsumer messageConsumer = session.createConsumer(destination); messageConsumer.receive(1000); MessageProducer messageProducer = session.createProducer(destination); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); result = (TextMessage)messageConsumer.receive(1000); } } assertEquals(text, result == null ? 
null : result.getText()); } @Test public void testTopicTwoConsumersEachGetMessage() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; TextMessage result2; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Topic destination = session.createTopic("testTopicTwoConsumersEachGetMessage"); MessageProducer messageProducer = session.createProducer(destination); AtomicReference<TextMessage> messageAtomicReference = new AtomicReference<>(); Thread thread = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.setClientID("1"); connection2.start(); try (Session session2 = connection2.createSession()) { Topic destination2 = session2.createTopic("testTopicTwoConsumersEachGetMessage"); MessageConsumer messageConsumer2 = session2.createConsumer(destination2); for(int i = 0; i < 1000; i++){ TextMessage message = (TextMessage) messageConsumer2.receive(1000); if (message!=null){ messageAtomicReference.set(message); break; } } } } catch (JMSException jmse){ } }); thread.start(); AtomicReference<TextMessage> messageAtomicReference2 = new AtomicReference<>(); Thread thread2 = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.setClientID("2"); connection2.start(); try (Session session2 = connection2.createSession()) { Topic destination2 = session2.createTopic("testTopicTwoConsumersEachGetMessage"); MessageConsumer messageConsumer2 = session2.createConsumer(destination2); for(int i = 0; i < 1000; i++){ TextMessage message = (TextMessage) messageConsumer2.receive(1000); if (message!=null){ messageAtomicReference2.set(message); break; } } } } catch (JMSException jmse){ } }); thread2.start(); Thread.sleep(1000); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); Thread.sleep(2000); result = messageAtomicReference.get(); 
result2 = messageAtomicReference2.get(); } } assertEquals(text, result == null ? null : result.getText()); assertEquals(text, result2 == null ? null : result2.getText()); } @Test public void testTopicTwoSharedDurableConsumersGetOnlyOneMessage() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; TextMessage result2; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Topic destination = session.createTopic("testTopicTwoSharedDurableConsumersGetOnlyOneMessage"); MessageProducer messageProducer = session.createProducer(destination); AtomicReference<TextMessage> messageAtomicReference = new AtomicReference<>(); Thread thread = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.start(); try (Session session2 = connection2.createSession()) { Topic destination2 = session2.createTopic("testTopicTwoSharedDurableConsumersGetOnlyOneMessage"); MessageConsumer messageConsumer2 = session2.createSharedDurableConsumer(destination2, "shared"); for(int i = 0; i < 1000; i++){ TextMessage message = (TextMessage) messageConsumer2.receive(1000); if (message!=null){ messageAtomicReference.set(message); break; } } } } catch (JMSException jmse){ } }); thread.start(); AtomicReference<TextMessage> messageAtomicReference2 = new AtomicReference<>(); Thread thread2 = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.start(); try (Session session2 = connection2.createSession()) { Topic destination2 = session2.createTopic("testTopicTwoSharedDurableConsumersGetOnlyOneMessage"); MessageConsumer messageConsumer2 = session2.createSharedDurableConsumer(destination2, "shared"); for(int i = 0; i < 1000; i++){ TextMessage message = (TextMessage) messageConsumer2.receive(1000); if (message!=null){ messageAtomicReference2.set(message); break; } } } } catch (JMSException 
jmse){ } }); thread2.start(); Thread.sleep(1000); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); Thread.sleep(10000); result = messageAtomicReference.get(); result2 = messageAtomicReference2.get(); } } assertEquals(text, result == null ? result2 == null ? null : result2.getText() : result.getText()); assertTrue(result == null ? result2 != null : true); } @Test public void testQueueTwoConsumersGetOnlyOneMessage() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; TextMessage result2; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Queue destination = session.createQueue("testQueueTwoConsumersGetOnlyOneMessage"); MessageProducer messageProducer = session.createProducer(destination); AtomicReference<TextMessage> messageAtomicReference = new AtomicReference<>(); Thread thread = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.start(); try (Session session2 = connection2.createSession()) { Queue destination2 = session2.createQueue("testQueueTwoConsumersGetOnlyOneMessage"); MessageConsumer messageConsumer2 = session2.createConsumer(destination2); messageAtomicReference.set( (TextMessage) messageConsumer2.receive(1000)); } } catch (JMSException jmse){ } }); thread.start(); AtomicReference<TextMessage> messageAtomicReference2 = new AtomicReference<>(); Thread thread2 = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection2.start(); try (Session session2 = connection2.createSession()) { Queue destination2 = session2.createQueue("testQueueTwoConsumersGetOnlyOneMessage"); MessageConsumer messageConsumer2 = session2.createConsumer(destination2); messageAtomicReference2.set( (TextMessage) messageConsumer2.receive(1000)); } } catch (JMSException jmse){ } }); thread2.start(); 
messageProducer.send(session.createTextMessage(text)); messageProducer.close(); Thread.sleep(2000); result = messageAtomicReference.get(); result2 = messageAtomicReference2.get(); } } assertEquals(text, result == null ? result2 == null ? null : result2.getText() : result.getText()); assertTrue(result == null ? result2 != null : true); } @Test public void testTopicNonDurableDoesntReceiveMessagesWhilstNotConnected() throws IOException, InterruptedException, JMSException { String text = "testString"; TextMessage result; TextMessage result2; try(Connection connection = connectionFactory.createConnection()){ connection.start(); try(Session session = connection.createSession()){ Topic destination = session.createTopic("testTopicNonDurableDoesntReceiveMessagesWhilstNotConnected"); MessageConsumer messageConsumer = session.createConsumer(destination); messageConsumer.receive(1000); MessageProducer messageProducer = session.createProducer(destination); messageProducer.send(session.createTextMessage(text)); messageProducer.close(); result = (TextMessage)messageConsumer.receive(1000); AtomicReference<TextMessage> messageAtomicReference = new AtomicReference<>(); Thread thread = new Thread(() -> { try (Connection connection2 = connectionFactory.createConnection()) { connection.start(); try (Session session2 = connection2.createSession()) { Topic destination2 = session2.createTopic("testTopicNonDurableDoesntReceiveMessagesWhilstNotConnected"); MessageConsumer messageConsumer2 = session2.createConsumer(destination2); messageAtomicReference.set( (TextMessage) messageConsumer2.receive(1000)); } } catch (JMSException jmse){ } }); thread.start(); Thread.sleep(2000); result2 = messageAtomicReference.get(); } } assertEquals(text, result == null ? null : result.getText()); assertNull(result2); } }
/*
 * Copyright (c) 2002, 2007, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.nio.cs.ext;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import sun.nio.cs.HistoricallyNamedCharset;
import sun.nio.cs.Surrogate;
import sun.nio.cs.US_ASCII;

/*
 * Implementation notes:
 *
 * (1)"Standard based" (ASCII, JIS_X_0201 and JIS_X_0208) ISO2022-JP charset
 * is provided by the base implementation of this class.
 *
 * Three Microsoft ISO2022-JP variants, MS50220, MS50221 and MSISO2022JP
 * are provided via subclasses.
 *
 * (2)MS50220 and MS50221 are assumed to work the same way as Microsoft
 * CP50220 and CP50221's 7-bit implementation works by using CP5022X
 * specific JIS0208 and JIS0212 mapping tables (generated via Microsoft's
 * MultiByteToWideChar/WideCharToMultiByte APIs). The only difference
 * between these 2 classes is that MS50220 does not support singlebyte
 * halfwidth kana (Uff61-Uff9f) shiftin mechanism when "encoding", instead
 * these halfwidth kana characters are converted to their fullwidth JIS0208
 * counterparts.
 *
 * The difference between the standard JIS_X_0208 and JIS_X_0212 mappings
 * and the CP50220/50221 specific are
 *
 * 0208 mapping:
 *   1)0x213d <-> U2015 (compared to U2014)
 *   2)One way mappings for 5 characters below
 *     u2225 (ms) -> 0x2142 <-> u2016 (jis)
 *     uff0d (ms) -> 0x215d <-> u2212 (jis)
 *     uffe0 (ms) -> 0x2171 <-> u00a2 (jis)
 *     uffe1 (ms) -> 0x2172 <-> u00a3 (jis)
 *     uffe2 (ms) -> 0x224c <-> u00ac (jis)
 *     //should consider 0xff5e -> 0x2141 <-> U301c?
 *   3)NEC Row13 0x2d21-0x2d79
 *   4)85-94 ku <-> UE000,UE3AB (includes NEC selected
 *     IBM kanji in 89-92ku)
 *   5)UFF61-UFF9f -> Fullwidth 0208 KANA
 *
 * 0212 mapping:
 *   1)0x2237 <-> UFF5E (Fullwidth Tilde)
 *   2)0x2271 <-> U2116 (Numero Sign)
 *   3)85-94 ku <-> UE3AC - UE757
 *
 * (3)MSISO2022JP uses a JIS0208 mapping generated from MS932DB.b2c
 * and MS932DB.c2b by converting the SJIS codepoints back to their
 * JIS0208 counterparts. With the exception of
 *
 *   (a)Codepoints with a resulting JIS0208 codepoints beyond 0x7e00 are
 *      dropped (this includs the IBM Extended Kanji/Non-kanji from 0x9321
 *      to 0x972c)
 *   (b)The Unicode codepoints that the IBM Extended Kanji/Non-kanji are
 *      mapped to (in MS932) are mapped back to NEC selected IBM Kanji/
 *      Non-kanji area at 0x7921-0x7c7e.
 *
 * Compared to JIS_X_0208 mapping, this MS932 based mapping has
 *   (a)different mappings for 7 JIS codepoints
 *        0x213d <-> U2015
 *        0x2141 <-> UFF5E
 *        0x2142 <-> U2225
 *        0x215d <-> Uff0d
 *        0x2171 <-> Uffe0
 *        0x2172 <-> Uffe1
 *        0x224c <-> Uffe2
 *   (b)added one-way c2b mappings for
 *        U00b8 -> 0x2124
 *        U00b7 -> 0x2126
 *        U00af -> 0x2131
 *        U00ab -> 0x2263
 *        U00bb -> 0x2264
 *        U3094 -> 0x2574
 *        U00b5 -> 0x264c
 *   (c)NEC Row 13
 *   (d)NEC selected IBM extended Kanji/Non-kanji
 *      These codepoints are mapped to the same Unicode codepoints as
 *      the MS932 does, while MS50220/50221 maps them to the Unicode
 *      private area.
 *
 * # There is also an interesting difference when compared to MS5022X
 *   0208 mapping for JIS codepoint "0x2D60", MS932 maps it to U301d
 *   but MS5022X maps it to U301e, obvious MS5022X is wrong, but...
 */

/**
 * ISO-2022-JP charset implementation.  The charset is stateful: escape
 * sequences switch the current character-set "state" (ASCII, JIS X 0201,
 * JIS X 0208, optionally JIS X 0212) and SO/SI shift half-width kana in
 * and out.  Microsoft variants override the protected hook methods below
 * to swap in their own mapping tables.
 */
public class ISO2022_JP
    extends Charset
    implements HistoricallyNamedCharset
{
    // Charset states selected by the escape sequences noted on each line.
    private static final int ASCII = 0;                 // ESC ( B
    private static final int JISX0201_1976 = 1;         // ESC ( J
    private static final int JISX0208_1978 = 2;         // ESC $ @
    private static final int JISX0208_1983 = 3;         // ESC $ B
    private static final int JISX0212_1990 = 4;         // ESC $ ( D
    private static final int JISX0201_1976_KANA = 5;    // ESC ( I
    private static final int SHIFTOUT = 6;

    // Control bytes that drive the state machine.
    private static final int ESC = 0x1b;
    private static final int SO = 0x0e;
    private static final int SI = 0x0f;

    public ISO2022_JP() {
        super("ISO-2022-JP",
              ExtendedCharsets.aliasesFor("ISO-2022-JP"));
    }

    // Used by the MS50220/MS50221/MSISO2022JP subclasses.
    protected ISO2022_JP(String canonicalName,
                         String[] aliases) {
        super(canonicalName, aliases);
    }

    public String historicalName() {
        return "ISO2022JP";
    }

    public boolean contains(Charset cs) {
        return ((cs instanceof JIS_X_0201)
                || (cs instanceof US_ASCII)
                || (cs instanceof JIS_X_0208)
                || (cs instanceof ISO2022_JP));
    }

    public CharsetDecoder newDecoder() {
        return new Decoder(this,
                           getDecIndex1(),
                           getDecIndex2(),
                           get0212Decoder());
    }

    public CharsetEncoder newEncoder() {
        return new Encoder(this,
                           getEncIndex1(),
                           getEncIndex2(),
                           get0212Encoder(),
                           doSBKANA());
    }

    // Hook methods: subclasses override these to supply variant-specific
    // JIS X 0208 / 0212 mapping tables and single-byte-kana policy.
    protected short[] getDecIndex1() {
        return JIS_X_0208_Decoder.getIndex1();
    }

    protected String[] getDecIndex2() {
        return JIS_X_0208_Decoder.getIndex2();
    }

    // Null means JIS X 0212 is not supported by this variant.
    protected DoubleByteDecoder get0212Decoder() {
        return null;
    }

    protected short[] getEncIndex1() {
        return JIS_X_0208_Encoder.getIndex1();
    }

    protected String[] getEncIndex2() {
        return JIS_X_0208_Encoder.getIndex2();
    }

    // Null means JIS X 0212 is not supported by this variant.
    protected DoubleByteEncoder get0212Encoder() {
        return null;
    }

    // Whether the encoder may shift into single-byte halfwidth kana.
    protected boolean doSBKANA() {
        return true;
    }

    /**
     * Stateful decoder.  Tracks the current charset state across calls;
     * SHIFTOUT remembers the previous state so SI can restore it.
     */
    private static class Decoder extends DoubleByteDecoder
        implements DelegatableDecoder {

        private int currentState;
        private int previousState;   // state to restore on SI after SO
        private DoubleByteDecoder decoder0212;  // may be null (no 0212 support)

        protected Decoder(Charset cs,
                          short[] index1,
                          String[] index2,
                          DoubleByteDecoder decoder0212) {
            super(cs, index1, index2, 0x21, 0x7e);
            this.decoder0212 = decoder0212;
            currentState = ASCII;
            previousState = ASCII;
        }

        // Single bytes are never mapped directly; handled by the state machine.
        protected char convSingleByte(int b) {
            return REPLACE_CHAR;
        }

        public void implReset() {
            currentState = ASCII;
            previousState = ASCII;
        }

        // Array-backed fast path.  Mirrors decodeBufferLoop exactly; buffer
        // positions are written back in the finally block, so inputSize must
        // only be added to sp once a whole unit has been consumed.
        private CoderResult decodeArrayLoop(ByteBuffer src,
                                            CharBuffer dst)
        {
            int inputSize = 0;
            int b1 = 0, b2 = 0, b3 = 0, b4 = 0;
            char c = REPLACE_CHAR;

            byte[] sa = src.array();
            int sp = src.arrayOffset() + src.position();
            int sl = src.arrayOffset() + src.limit();
            assert (sp <= sl);
            sp = (sp <= sl ? sp : sl);
            char[] da = dst.array();
            int dp = dst.arrayOffset() + dst.position();
            int dl = dst.arrayOffset() + dst.limit();
            assert (dp <= dl);
            dp = (dp <= dl ? dp : dl);

            try {
                while (sp < sl) {
                    b1 = sa[sp] & 0xff;
                    inputSize = 1;
                    // ISO-2022-JP is a 7-bit encoding; high bit set is malformed.
                    if ((b1 & 0x80) != 0) {
                        return CoderResult.malformedForLength(inputSize);
                    }
                    if (b1 == ESC || b1 == SO || b1 == SI) {
                        if (b1 == ESC) {
                            // Escape sequences are 3 (or 4, for 0212) bytes long.
                            if (sp + inputSize + 2 > sl)
                                return CoderResult.UNDERFLOW;
                            b2 = sa[sp + inputSize++] & 0xff;
                            if (b2 == '(') {
                                b3 = sa[sp + inputSize++] & 0xff;
                                if (b3 == 'B'){
                                    currentState = ASCII;
                                } else if (b3 == 'J'){
                                    currentState = JISX0201_1976;
                                } else if (b3 == 'I'){
                                    currentState = JISX0201_1976_KANA;
                                } else {
                                    return CoderResult.malformedForLength(inputSize);
                                }
                            } else if (b2 == '$'){
                                b3 = sa[sp + inputSize++] & 0xff;
                                if (b3 == '@'){
                                    currentState = JISX0208_1978;
                                } else if (b3 == 'B'){
                                    currentState = JISX0208_1983;
                                } else if (b3 == '(' && decoder0212 != null) {
                                    // ESC $ ( D -- only valid when 0212 is supported.
                                    if (sp + inputSize + 1 > sl)
                                        return CoderResult.UNDERFLOW;
                                    b4 = sa[sp + inputSize++] & 0xff;
                                    if (b4 == 'D') {
                                        currentState = JISX0212_1990;
                                    } else {
                                        return CoderResult.malformedForLength(inputSize);
                                    }
                                } else {
                                    return CoderResult.malformedForLength(inputSize);
                                }
                            } else {
                                return CoderResult.malformedForLength(inputSize);
                            }
                        } else if (b1 == SO) {
                            previousState = currentState;
                            currentState = SHIFTOUT;
                        } else if (b1 == SI) {
                            currentState = previousState;
                        }
                        sp += inputSize;
                        continue;
                    }
                    if (dp + 1 > dl)
                        return CoderResult.OVERFLOW;

                    switch (currentState){
                        case ASCII:
                            da[dp++] = (char)(b1 & 0xff);
                            break;
                        case JISX0201_1976:
                            switch (b1) {
                              case 0x5c:  // Yen/tilde substitution
                                da[dp++] = '\u00a5';
                                break;
                              case 0x7e:
                                da[dp++] = '\u203e';
                                break;
                              default:
                                da[dp++] = (char)b1;
                                break;
                            }
                            break;
                        case JISX0208_1978:
                        case JISX0208_1983:
                            if (sp + inputSize + 1 > sl)
                                return CoderResult.UNDERFLOW;
                            b2 = sa[sp + inputSize++] & 0xff;
                            c = decodeDouble(b1,b2);
                            if (c == REPLACE_CHAR)
                                return CoderResult.unmappableForLength(inputSize);
                            da[dp++] = c;
                            break;
                        case JISX0212_1990:
                            if (sp + inputSize + 1 > sl)
                                return CoderResult.UNDERFLOW;
                            b2 = sa[sp + inputSize++] & 0xff;
                            c = decoder0212.decodeDouble(b1,b2);
                            if (c == REPLACE_CHAR)
                                return CoderResult.unmappableForLength(inputSize);
                            da[dp++] = c;
                            break;
                        case JISX0201_1976_KANA:
                        case SHIFTOUT:
                            if (b1 > 0x60) {
                                return CoderResult.malformedForLength(inputSize);
                            }
                            // Map JIS X 0201 kana byte to U+FF61..U+FF9F.
                            da[dp++] = (char)(b1 + 0xff40);
                            break;
                    }
                    sp += inputSize;
                }
                return CoderResult.UNDERFLOW;
            } finally {
                src.position(sp - src.arrayOffset());
                dst.position(dp - dst.arrayOffset());
            }
        }

        // Buffer-based slow path; logic parallels decodeArrayLoop.  The
        // mark is only advanced once a complete unit has been decoded.
        private CoderResult decodeBufferLoop(ByteBuffer src,
                                             CharBuffer dst)
        {
            int mark = src.position();
            int b1 = 0, b2 = 0, b3 = 0, b4=0;
            char c = REPLACE_CHAR;
            int inputSize = 0;

            try {
                while (src.hasRemaining()) {
                    b1 = src.get() & 0xff;
                    inputSize = 1;
                    if ((b1 & 0x80) != 0)
                        return CoderResult.malformedForLength(inputSize);

                    if (b1 == ESC || b1 == SO || b1 == SI) {
                        if (b1 == ESC) {  // ESC
                            if (src.remaining() < 2)
                                return CoderResult.UNDERFLOW;
                            b2 = src.get() & 0xff;
                            inputSize++;
                            if (b2 == '(') {
                                b3 = src.get() & 0xff;
                                inputSize++;
                                if (b3 == 'B'){
                                    currentState = ASCII;
                                } else if (b3 == 'J'){
                                    currentState = JISX0201_1976;
                                } else if (b3 == 'I'){
                                    currentState = JISX0201_1976_KANA;
                                } else {
                                    return CoderResult.malformedForLength(inputSize);
                                }
                            } else if (b2 == '$'){
                                b3 = src.get() & 0xff;
                                inputSize++;
                                if (b3 == '@'){
                                    currentState = JISX0208_1978;
                                } else if (b3 == 'B'){
                                    currentState = JISX0208_1983;
                                } else if (b3 == '(' && decoder0212 != null) {
                                    if (!src.hasRemaining())
                                        return CoderResult.UNDERFLOW;
                                    b4 = src.get() & 0xff;
                                    inputSize++;
                                    if (b4 == 'D') {
                                        currentState = JISX0212_1990;
                                    } else {
                                        return CoderResult.malformedForLength(inputSize);
                                    }
                                } else {
                                    return CoderResult.malformedForLength(inputSize);
                                }
                            } else {
                                return CoderResult.malformedForLength(inputSize);
                            }
                        } else if (b1 == SO) {
                            previousState = currentState;
                            currentState = SHIFTOUT;
                        } else if (b1 == SI) { // shift back in
                            currentState = previousState;
                        }
                        mark += inputSize;
                        continue;
                    }
                    if (!dst.hasRemaining())
                        return CoderResult.OVERFLOW;

                    switch (currentState){
                        case ASCII:
                            dst.put((char)(b1 & 0xff));
                            break;
                        case JISX0201_1976:
                            switch (b1) {
                              case 0x5c:  // Yen/tilde substitution
                                dst.put('\u00a5');
                                break;
                              case 0x7e:
                                dst.put('\u203e');
                                break;
                              default:
                                dst.put((char)b1);
                                break;
                            }
                            break;
                        case JISX0208_1978:
                        case JISX0208_1983:
                            if (!src.hasRemaining())
                                return CoderResult.UNDERFLOW;
                            b2 = src.get() & 0xff;
                            inputSize++;
                            c = decodeDouble(b1,b2);
                            if (c == REPLACE_CHAR)
                                return CoderResult.unmappableForLength(inputSize);
                            dst.put(c);
                            break;
                        case JISX0212_1990:
                            if (!src.hasRemaining())
                                return CoderResult.UNDERFLOW;
                            b2 = src.get() & 0xff;
                            inputSize++;
                            c = decoder0212.decodeDouble(b1,b2);
                            if (c == REPLACE_CHAR)
                                return CoderResult.unmappableForLength(inputSize);
                            dst.put(c);
                            break;
                        case JISX0201_1976_KANA:
                        case SHIFTOUT:
                            if (b1 > 0x60) {
                                return CoderResult.malformedForLength(inputSize);
                            }
                            dst.put((char)(b1 + 0xff40));
                            break;
                    }
                    mark += inputSize;
                }
                return CoderResult.UNDERFLOW;
            } finally {
                src.position(mark);
            }
        }

        // Make some protected methods public for use by JISAutoDetect
        public CoderResult decodeLoop(ByteBuffer src, CharBuffer dst) {
            if (src.hasArray() && dst.hasArray())
                return decodeArrayLoop(src, dst);
            else
                return decodeBufferLoop(src, dst);
        }

        public CoderResult implFlush(CharBuffer out) {
            return super.implFlush(out);
        }
    }

    /**
     * Stateful encoder.  Emits the escape sequence for a mode only when the
     * mode actually changes, and returns to ASCII on flush.
     */
    private static class Encoder extends DoubleByteEncoder {

        // Default replacement: JIS X 0208 0x2129 (fullwidth '?').
        private static byte[] repl = { (byte)0x21, (byte)0x29 };
        private int currentMode = ASCII;
        private int replaceMode = JISX0208_1983;  // mode the replacement bytes belong to
        private DoubleByteEncoder encoder0212 = null;  // may be null (no 0212 support)
        private boolean doSBKANA;  // allow single-byte halfwidth kana shift-in

        private Encoder(Charset cs,
                        short[] index1,
                        String[] index2,
                        DoubleByteEncoder encoder0212,
                        boolean doSBKANA) {
            // maxBytes 9 when 0212 is in play: 4-byte escape + 2 data + 3-byte reset.
            super(cs, index1, index2, repl, 4.0f,
                  (encoder0212 != null)? 9.0f : 8.0f);
            this.encoder0212 = encoder0212;
            this.doSBKANA = doSBKANA;
        }

        // Single chars are never mapped directly; handled by encodeLoop.
        protected int encodeSingle(char inputChar) {
            return -1;
        }

        protected void implReset() {
            currentMode = ASCII;
        }

        protected void implReplaceWith(byte[] newReplacement) {
            /* It's almost impossible to decide which charset they belong
               to. The best thing we can do here is to "guess" based on
               the length of newReplacement.
             */
            if (newReplacement.length == 1) {
                replaceMode = ASCII;
            } else if (newReplacement.length == 2) {
                replaceMode = JISX0208_1983;
            }
        }

        // On flush, return the stream to ASCII (ESC ( B) if needed.
        protected CoderResult implFlush(ByteBuffer out) {
            if (currentMode != ASCII) {
                if (out.remaining() < 3)
                    return CoderResult.OVERFLOW;
                out.put((byte)0x1b);
                out.put((byte)0x28);
                out.put((byte)0x42);
                currentMode = ASCII;
            }
            return CoderResult.UNDERFLOW;
        }

        public boolean canEncode(char c) {
            return ((c <= '\u007F') ||
                    (c >= 0xFF61 && c <= 0xFF9F) ||
                    (c == '\u00A5') ||
                    (c == '\u203E') ||
                    super.canEncode(c) ||
                    (encoder0212!=null && encoder0212.canEncode(c)));
        }

        private final Surrogate.Parser sgp = new Surrogate.Parser();

        // Array-backed fast path; positions written back in the finally block.
        private CoderResult encodeArrayLoop(CharBuffer src,
                                            ByteBuffer dst)
        {
            char[] sa = src.array();
            int sp = src.arrayOffset() + src.position();
            int sl = src.arrayOffset() + src.limit();
            assert (sp <= sl);
            sp = (sp <= sl ? sp : sl);
            byte[] da = dst.array();
            int dp = dst.arrayOffset() + dst.position();
            int dl = dst.arrayOffset() + dst.limit();
            assert (dp <= dl);
            dp = (dp <= dl ? dp : dl);

            try {
                while (sp < sl) {
                    char c = sa[sp];

                    if (c <= '\u007F') {
                        if (currentMode != ASCII) {
                            if (dl - dp < 3)
                                return CoderResult.OVERFLOW;
                            da[dp++] = (byte)0x1b;
                            da[dp++] = (byte)0x28;
                            da[dp++] = (byte)0x42;
                            currentMode = ASCII;
                        }
                        if (dl - dp < 1)
                            return CoderResult.OVERFLOW;
                        da[dp++] = (byte)c;
                    } else if (c >= 0xff61 && c <= 0xff9f && doSBKANA) {
                        //a single byte kana
                        if (currentMode != JISX0201_1976_KANA) {
                            if (dl - dp < 3)
                                return CoderResult.OVERFLOW;
                            da[dp++] = (byte)0x1b;
                            da[dp++] = (byte)0x28;
                            da[dp++] = (byte)0x49;
                            currentMode = JISX0201_1976_KANA;
                        }
                        if (dl - dp < 1)
                            return CoderResult.OVERFLOW;
                        da[dp++] = (byte)(c - 0xff40);
                    } else if (c == '\u00A5' || c == '\u203E') {
                        //backslash or tilde
                        if (currentMode != JISX0201_1976) {
                            if (dl - dp < 3)
                                return CoderResult.OVERFLOW;
                            da[dp++] = (byte)0x1b;
                            da[dp++] = (byte)0x28;
                            da[dp++] = (byte)0x4a;
                            currentMode = JISX0201_1976;
                        }
                        if (dl - dp < 1)
                            return CoderResult.OVERFLOW;
                        da[dp++] = (c == '\u00A5')?(byte)0x5C:(byte)0x7e;
                    } else {
                        int index = encodeDouble(c);
                        if (index != 0) {
                            if (currentMode != JISX0208_1983) {
                                if (dl - dp < 3)
                                    return CoderResult.OVERFLOW;
                                da[dp++] = (byte)0x1b;
                                da[dp++] = (byte)0x24;
                                da[dp++] = (byte)0x42;
                                currentMode = JISX0208_1983;
                            }
                            if (dl - dp < 2)
                                return CoderResult.OVERFLOW;
                            da[dp++] = (byte)(index >> 8);
                            da[dp++] = (byte)(index & 0xff);
                        } else if (encoder0212 != null &&
                                   (index = encoder0212.encodeDouble(c)) != 0) {
                            if (currentMode != JISX0212_1990) {
                                if (dl - dp < 4)
                                    return CoderResult.OVERFLOW;
                                da[dp++] = (byte)0x1b;
                                da[dp++] = (byte)0x24;
                                da[dp++] = (byte)0x28;
                                da[dp++] = (byte)0x44;
                                currentMode = JISX0212_1990;
                            }
                            if (dl - dp < 2)
                                return CoderResult.OVERFLOW;
                            da[dp++] = (byte)(index >> 8);
                            da[dp++] = (byte)(index & 0xff);
                        } else {
                            if (Surrogate.is(c) && sgp.parse(c, sa, sp, sl) < 0)
                                return sgp.error();
                            // Before reporting unmappable under REPLACE, switch to
                            // the mode the replacement bytes belong to.
                            if (unmappableCharacterAction()
                                == CodingErrorAction.REPLACE
                                && currentMode != replaceMode) {
                                if (dl - dp < 3)
                                    return CoderResult.OVERFLOW;
                                if (replaceMode == ASCII) {
                                    da[dp++] = (byte)0x1b;
                                    da[dp++] = (byte)0x28;
                                    da[dp++] = (byte)0x42;
                                } else {
                                    da[dp++] = (byte)0x1b;
                                    da[dp++] = (byte)0x24;
                                    da[dp++] = (byte)0x42;
                                }
                                currentMode = replaceMode;
                            }
                            if (Surrogate.is(c))
                                return sgp.unmappableResult();
                            return CoderResult.unmappableForLength(1);
                        }
                    }
                    sp++;
                }
                return CoderResult.UNDERFLOW;
            } finally {
                src.position(sp - src.arrayOffset());
                dst.position(dp - dst.arrayOffset());
            }
        }

        // Buffer-based slow path; logic parallels encodeArrayLoop.
        private CoderResult encodeBufferLoop(CharBuffer src,
                                             ByteBuffer dst)
        {
            int mark = src.position();
            try {
                while (src.hasRemaining()) {
                    char c = src.get();
                    if (c <= '\u007F') {
                        if (currentMode != ASCII) {
                            if (dst.remaining() < 3)
                                return CoderResult.OVERFLOW;
                            dst.put((byte)0x1b);
                            dst.put((byte)0x28);
                            dst.put((byte)0x42);
                            currentMode = ASCII;
                        }
                        if (dst.remaining() < 1)
                            return CoderResult.OVERFLOW;
                        dst.put((byte)c);
                    } else if (c >= 0xff61 && c <= 0xff9f && doSBKANA) {
                        //Is it a single byte kana?
                        if (currentMode != JISX0201_1976_KANA) {
                            if (dst.remaining() < 3)
                                return CoderResult.OVERFLOW;
                            dst.put((byte)0x1b);
                            dst.put((byte)0x28);
                            dst.put((byte)0x49);
                            currentMode = JISX0201_1976_KANA;
                        }
                        if (dst.remaining() < 1)
                            return CoderResult.OVERFLOW;
                        dst.put((byte)(c - 0xff40));
                    } else if (c == '\u00a5' || c == '\u203E') {
                        if (currentMode != JISX0201_1976) {
                            if (dst.remaining() < 3)
                                return CoderResult.OVERFLOW;
                            dst.put((byte)0x1b);
                            dst.put((byte)0x28);
                            dst.put((byte)0x4a);
                            currentMode = JISX0201_1976;
                        }
                        if (dst.remaining() < 1)
                            return CoderResult.OVERFLOW;
                        dst.put((c == '\u00A5')?(byte)0x5C:(byte)0x7e);
                    } else {
                        int index = encodeDouble(c);
                        if (index != 0) {
                            if (currentMode != JISX0208_1983) {
                                if (dst.remaining() < 3)
                                    return CoderResult.OVERFLOW;
                                dst.put((byte)0x1b);
                                dst.put((byte)0x24);
                                dst.put((byte)0x42);
                                currentMode = JISX0208_1983;
                            }
                            if (dst.remaining() < 2)
                                return CoderResult.OVERFLOW;
                            dst.put((byte)(index >> 8));
                            dst.put((byte)(index & 0xff));
                        } else if (encoder0212 != null &&
                                   (index = encoder0212.encodeDouble(c)) != 0) {
                            if (currentMode != JISX0212_1990) {
                                if (dst.remaining() < 4)
                                    return CoderResult.OVERFLOW;
                                dst.put((byte)0x1b);
                                dst.put((byte)0x24);
                                dst.put((byte)0x28);
                                dst.put((byte)0x44);
                                currentMode = JISX0212_1990;
                            }
                            if (dst.remaining() < 2)
                                return CoderResult.OVERFLOW;
                            dst.put((byte)(index >> 8));
                            dst.put((byte)(index & 0xff));
                        } else {
                            if (Surrogate.is(c) && sgp.parse(c, src) < 0)
                                return sgp.error();
                            // Before reporting unmappable under REPLACE, switch to
                            // the mode the replacement bytes belong to.
                            if (unmappableCharacterAction()
                                == CodingErrorAction.REPLACE
                                && currentMode != replaceMode) {
                                if (dst.remaining() < 3)
                                    return CoderResult.OVERFLOW;
                                if (replaceMode == ASCII) {
                                    dst.put((byte)0x1b);
                                    dst.put((byte)0x28);
                                    dst.put((byte)0x42);
                                } else {
                                    dst.put((byte)0x1b);
                                    dst.put((byte)0x24);
                                    dst.put((byte)0x42);
                                }
                                currentMode = replaceMode;
                            }
                            if (Surrogate.is(c))
                                return sgp.unmappableResult();
                            return CoderResult.unmappableForLength(1);
                        }
                    }
                    mark++;
                }
                return CoderResult.UNDERFLOW;
            } finally {
                src.position(mark);
            }
        }

        protected CoderResult encodeLoop(CharBuffer src,
                                         ByteBuffer dst)
        {
            if (src.hasArray() && dst.hasArray())
                return encodeArrayLoop(src, dst);
            else
                return encodeBufferLoop(src, dst);
        }
    }
}
package com.xtremelabs.robolectric.shadows;

import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import com.xtremelabs.robolectric.Robolectric;
import com.xtremelabs.robolectric.WithTestDefaultsRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import static com.xtremelabs.robolectric.Robolectric.shadowOf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.*;

/**
 * Shared test suite for Robolectric's {@link ShadowSQLiteDatabase}.
 *
 * Exercises insert/query/update/delete, raw queries, execSQL with bound args,
 * transactions, and open/close behavior against the shadow implementation
 * (backed by a JDBC connection, reachable via {@code shadowOf(database).getConnection()} —
 * per the comments below this appears to be H2 rather than real SQLite).
 * Abstract so concrete subclasses can bind it to a specific test runner.
 */
public abstract class DatabaseTestBase {
    protected SQLiteDatabase database;          // the (shadowed) database under test
    protected ShadowSQLiteDatabase shDatabase;  // shadow handle, exposes JDBC connection and test hooks

    /**
     * Opens the shadow database, creates three identically-shaped tables
     * (table_name / rawtable / exectable) and seeds "rawtable" with two rows
     * so that raw-query tests have known data.
     */
    @Before
    public void setUp() throws Exception {
        database = SQLiteDatabase.openDatabase("path", null, 0);
        shDatabase = Robolectric.shadowOf(database);
        database.execSQL("CREATE TABLE table_name (\n" +
                " id INTEGER PRIMARY KEY AUTOINCREMENT,\n" +
                " first_column VARCHAR(255),\n" +
                " second_column BINARY,\n" +
                " name VARCHAR(255),\n" +
                " big_int INTEGER\n" +
                ");");
        database.execSQL("CREATE TABLE rawtable (\n" +
                " id INTEGER PRIMARY KEY AUTOINCREMENT,\n" +
                " first_column VARCHAR(255),\n" +
                " second_column BINARY,\n" +
                " name VARCHAR(255),\n" +
                " big_int INTEGER\n" +
                ");");
        database.execSQL("CREATE TABLE exectable (\n" +
                " id INTEGER PRIMARY KEY AUTOINCREMENT,\n" +
                " first_column VARCHAR(255),\n" +
                " second_column BINARY,\n" +
                " name VARCHAR(255),\n" +
                " big_int INTEGER\n" +
                ");");

        // Seed row 1 (id will be 1).
        String stringColumnValue = "column_value";
        byte[] byteColumnValue = new byte[]{1, 2, 3};
        ContentValues values = new ContentValues();
        values.put("first_column", stringColumnValue);
        values.put("second_column", byteColumnValue);
        database.insert("rawtable", null, values);
        ////////////////////////////////////////////////
        // Seed row 2 (id will be 2).
        String stringColumnValue2 = "column_value2";
        byte[] byteColumnValue2 = new byte[]{4, 5, 6};
        ContentValues values2 = new ContentValues();
        values2.put("first_column", stringColumnValue2);
        values2.put("second_column", byteColumnValue2);
        database.insert("rawtable", null, values2);
    }

    @After
    public void tearDown() throws Exception {
        database.close();
    }

    // Round-trips a String and a byte[] through insert() + query().
    @Test()
    public void testInsertAndQuery() throws Exception {
        String stringColumnValue = "column_value";
        byte[] byteColumnValue = new byte[]{1, 2, 3};

        ContentValues values = new ContentValues();
        values.put("first_column", stringColumnValue);
        values.put("second_column", byteColumnValue);
        database.insert("table_name", null, values);

        Cursor cursor = database.query("table_name", new String[]{"second_column", "first_column"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(true));

        byte[] byteValueFromDatabase = cursor.getBlob(0);
        String stringValueFromDatabase = cursor.getString(1);

        // hamcrest's equalTo compares arrays element-wise, so byte[] works here.
        assertThat(stringValueFromDatabase, equalTo(stringColumnValue));
        assertThat(byteValueFromDatabase, equalTo(byteColumnValue));
    }

    // Same round-trip as above but read back through rawQuery().
    @Test
    public void testInsertAndRawQuery() throws Exception {
        String stringColumnValue = "column_value";
        byte[] byteColumnValue = new byte[]{1, 2, 3};

        ContentValues values = new ContentValues();
        values.put("first_column", stringColumnValue);
        values.put("second_column", byteColumnValue);
        database.insert("table_name", null, values);

        Cursor cursor = database.rawQuery("select second_column, first_column from table_name", null);
        assertThat(cursor.moveToFirst(), equalTo(true));

        byte[] byteValueFromDatabase = cursor.getBlob(0);
        String stringValueFromDatabase = cursor.getString(1);

        assertThat(stringValueFromDatabase, equalTo(stringColumnValue));
        assertThat(byteValueFromDatabase, equalTo(byteColumnValue));
    }

    // setThrowOnInsert is a shadow-only hook that forces insertOrThrow to fail.
    @Test(expected = android.database.SQLException.class)
    public void testInsertOrThrowWithSQLException() {
        shDatabase.setThrowOnInsert(true);
        database.insertOrThrow("table_name", null, new ContentValues());
    }

    @Test
    public void testInsertOrThrow() {
        String stringColumnValue = "column_value";
        byte[] byteColumnValue = new byte[]{1, 2, 3};
        ContentValues values = new ContentValues();
        values.put("first_column", stringColumnValue);
        values.put("second_column", byteColumnValue);
        database.insertOrThrow("table_name", null, values);

        Cursor cursor = database.rawQuery("select second_column, first_column from table_name", null);
        assertThat(cursor.moveToFirst(), equalTo(true));
        byte[] byteValueFromDatabase = cursor.getBlob(0);
        String stringValueFromDatabase = cursor.getString(1);
        assertThat(stringValueFromDatabase, equalTo(stringColumnValue));
        assertThat(byteValueFromDatabase, equalTo(byteColumnValue));
    }

    // A null selection arg for a '?' placeholder is rejected.
    @Test(expected = IllegalArgumentException.class)
    public void testRawQueryThrowsIndex0NullException() throws Exception {
        database.rawQuery("select second_column, first_column from rawtable WHERE `id` = ?", new String[]{null});
    }

    // A null selection arg is rejected even when the SQL has no placeholder.
    @Test(expected = IllegalArgumentException.class)
    public void testRawQueryThrowsIndex0NullException2() throws Exception {
        database.rawQuery("select second_column, first_column from rawtable", new String[]{null});
    }

    @Test
    public void testRawQueryCount() throws Exception {
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable WHERE `id` = ?", new String[]{"1"});
        assertThat(cursor.getCount(), equalTo(1));
    }

    @Test
    public void testRawQueryCount2() throws Exception {
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable", null);
        assertThat(cursor.getCount(), equalTo(2));
    }

    // An empty (but non-null) args array behaves like null when there are no placeholders.
    @Test
    public void testRawQueryCount3() throws Exception {
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable", new String[]{});
        assertThat(cursor.getCount(), equalTo(2));
    }

    /*
     * Reason why testRawQueryCount4() and testRawQueryCount5() expects exceptions even though exceptions are not found in Android.
     *
     * The code in Android acts inconsistently under API version 2.1_r1 (and perhaps other APIs)..
     * What happens is that rawQuery() remembers the selectionArgs of previous queries,
     * and uses them if no selectionArgs are given in subsequent queries.
     * If they were never given selectionArgs THEN they return empty cursors.
     *
     *
     *  if you run {
     *  db.rawQuery("select * from exercise WHERE name = ?",null);                       //this returns an empty cursor
     *  db.rawQuery("select * from exercise WHERE name = ?",new String[]{});             //this returns an empty cursor
     *  }
     *
     *  but if you run {
     *  db.rawQuery("select * from exercise WHERE name = ?",new String[]{"Leg Press"});  //this returns 1 exercise named "Leg Press"
     *  db.rawQuery("select * from exercise WHERE name = ?",null);                       //this too returns 1 exercise named "Leg Press"
     *  db.rawQuery("select * from exercise WHERE name = ?",new String[]{});             //this too returns 1 exercise named "Leg Press"
     *  }
     *
     *  so SQLite + Android work inconsistently (it maintains state that it should not)
     *  whereas H2 just throws an exception for not supplying the selectionArgs
     *
     *  So the question is should Robolectric:
     *  1) throw an exception, the way H2 does.
     *  2) return an empty Cursor.
     *  3) mimic Android\SQLite precisely and return inconsistent results based on previous state
     *
     *  Returning an empty cursor all the time would be bad
     *  because Android doesn't always return an empty cursor.
     *  But just mimicing Android would not be helpful,
     *  since it would be less than obvious where the problem is coming from.
     *  One should just avoid ever calling a statement without selectionArgs (when one has a ? placeholder),
     *  so it is best to throw an Exception to let the programmer know that this isn't going to turn out well if they try to run it under Android.
     *  Because we are running in the context of a test we do not have to mimic Android precisely (if it is more helpful not to!), we just need to help
     *  the testing programmer figure out what is going on.
     */
    @Test(expected = Exception.class)
    public void testRawQueryCount4() throws Exception {
        //Android and SQLite don't normally throw an exception here. See above explanation as to why Robolectric should.
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable WHERE `id` = ?", null);
    }

    @Test(expected = Exception.class)
    public void testRawQueryCount5() throws Exception {
        //Android and SQLite don't normally throw an exception here. See above explanation as to why Robolectric should.
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable WHERE `id` = ?", new String[]{});
    }

    // Supplying args when the SQL has no placeholder is an error.
    @Test(expected = android.database.sqlite.SQLiteException.class)
    public void testRawQueryCount8() throws Exception {
        Cursor cursor = database.rawQuery("select second_column, first_column from rawtable", new String[]{"1"});
    }

    // insert() reports failure as -1 rather than throwing.
    @Test
    public void testInsertWithException() {
        ContentValues values = new ContentValues();
        assertEquals(-1, database.insert("table_that_doesnt_exist", null, values));
    }

    @Test
    public void testEmptyTable() throws Exception {
        Cursor cursor = database.query("table_name", new String[]{"second_column", "first_column"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(false));
    }

    @Test
    public void testInsertRowIdGeneration() throws Exception {
        ContentValues values = new ContentValues();
        values.put("name", "Chuck");
        long id = database.insert("table_name", null, values);
        assertThat(id, not(equalTo(0L)));
    }

    @Test
    public void testInsertKeyGeneration() throws Exception {
        ContentValues values = new ContentValues();
        values.put("name", "Chuck");
        long key = database.insertWithOnConflict("table_name", null, values, SQLiteDatabase.CONFLICT_IGNORE);
        assertThat(key, not(equalTo(0L)));
    }

    @Test
    public void testUpdate() throws Exception {
        addChuck();
        assertThat(updateName(1234L, "Buster"), equalTo(1));
        Cursor cursor = database.query("table_name", new String[]{"id", "name"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(true));
        assertThat(cursor.getCount(), equalTo(1));
        assertIdAndName(cursor, 1234L, "Buster");
    }

    @Test
    public void testUpdateNoMatch() throws Exception {
        addChuck();
        // Updating a non-existent id reports 0 affected rows and leaves data intact.
        assertThat(updateName(5678L, "Buster"), equalTo(0));
        Cursor cursor = database.query("table_name", new String[]{"id", "name"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(true));
        assertThat(cursor.getCount(), equalTo(1));
        assertIdAndName(cursor, 1234L, "Chuck");
    }

    @Test
    public void testUpdateAll() throws Exception {
        addChuck();
        addJulie();
        // Null WHERE clause updates every row.
        assertThat(updateName("Belvedere"), equalTo(2));
        Cursor cursor = database.query("table_name", new String[]{"id", "name"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(true));
        assertThat(cursor.getCount(), equalTo(2));
        assertIdAndName(cursor, 1234L, "Belvedere");
        assertThat(cursor.moveToNext(), equalTo(true));
        assertIdAndName(cursor, 1235L, "Belvedere");
        // Also verifies cursor end-of-data behavior.
        assertThat(cursor.isLast(), equalTo(true));
        assertThat(cursor.moveToNext(), equalTo(false));
        assertThat(cursor.isAfterLast(), equalTo(true));
        assertThat(cursor.moveToNext(), equalTo(false));
    }

    @Test
    public void testDelete() throws Exception {
        addChuck();
        int deleted = database.delete("table_name", "id=1234", null);
        assertThat(deleted, equalTo(1));
        assertEmptyDatabase();
    }

    @Test
    public void testDeleteNoMatch() throws Exception {
        addChuck();
        int deleted = database.delete("table_name", "id=5678", null);
        assertThat(deleted, equalTo(0));
        assertNonEmptyDatabase();
    }

    @Test
    public void testDeleteAll() throws Exception {
        addChuck();
        addJulie();
        // "1" as WHERE clause matches all rows.
        int deleted = database.delete("table_name", "1", null);
        assertThat(deleted, equalTo(2));
        assertEmptyDatabase();
    }

    // Verifies execSQL by reading back through the shadow's raw JDBC connection.
    @Test
    public void testExecSQL() throws Exception {
        Statement statement;
        ResultSet resultSet;
        database.execSQL("INSERT INTO table_name (id, name) VALUES(1234, 'Chuck');");
        statement = shadowOf(database).getConnection().createStatement();
        resultSet = statement.executeQuery("SELECT COUNT(*) FROM table_name");
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getInt(1), equalTo(1));
        statement = shadowOf(database).getConnection().createStatement();
        resultSet = statement.executeQuery("SELECT * FROM table_name");
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getInt(1), equalTo(1234));
        // JDBC columns are 1-based; column 4 is "name".
        assertThat(resultSet.getString(4), equalTo("Chuck"));
    }

    // execSQL with bound Object[] args.
    @Test
    public void testExecSQLParams() throws Exception {
        Statement statement;
        ResultSet resultSet;
        database.execSQL("CREATE TABLE `routine` (`id` INTEGER PRIMARY KEY AUTOINCREMENT , `name` VARCHAR , `lastUsed` INTEGER DEFAULT 0 , UNIQUE (`name`)) ", new Object[]{});
        database.execSQL("INSERT INTO `routine` (`name` ,`lastUsed` ) VALUES (?,?)", new Object[]{"Leg Press", 0});
        database.execSQL("INSERT INTO `routine` (`name` ,`lastUsed` ) VALUES (?,?)", new Object[]{"Bench Press", 1});
        statement = shadowOf(database).getConnection().createStatement();
        resultSet = statement.executeQuery("SELECT COUNT(*) FROM `routine`");
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getInt(1), equalTo(2));
        statement = shadowOf(database).getConnection().createStatement();
        resultSet = statement.executeQuery("SELECT `id`, `name` ,`lastUsed` FROM `routine`");
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getInt(1), equalTo(1));
        assertThat(resultSet.getString(2), equalTo("Leg Press"));
        assertThat(resultSet.getInt(3), equalTo(0));
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getLong(1), equalTo(2L));
        assertThat(resultSet.getString(2), equalTo("Bench Press"));
        assertThat(resultSet.getInt(3), equalTo(1));
    }

    @Test(expected = android.database.SQLException.class)
    public void testExecSQLException() throws Exception {
        database.execSQL("INSERT INTO table_name;"); // invalid SQL
    }

    // A null bindArgs array is rejected when the SQL has a placeholder...
    @Test(expected = IllegalArgumentException.class)
    public void testExecSQLException2() throws Exception {
        database.execSQL("insert into exectable (first_column) values (?);", null);
    }

    // ...and also when it has no placeholder.
    @Test(expected = IllegalArgumentException.class)
    public void testExecSQLException4() throws Exception {
        database.execSQL("insert into exectable (first_column) values ('sdfsfs');", null);
    }

    @Test(expected = Exception.class)
    public void testExecSQLException5() throws Exception {
        //TODO: make this throw android.database.SQLException.class
        database.execSQL("insert into exectable (first_column) values ('kjhk');", new String[]{"xxxx"});
    }

    @Test(expected = Exception.class)
    public void testExecSQLException6() throws Exception {
        //TODO: make this throw android.database.SQLException.class
        database.execSQL("insert into exectable (first_column) values ('kdfd');", new String[]{null});
    }

    // Empty bindArgs with no placeholders is fine.
    @Test
    public void testExecSQL2() throws Exception {
        database.execSQL("insert into exectable (first_column) values ('eff');", new String[]{});
    }

    // Binding a null arg stores SQL NULL.
    @Test
    public void testExecSQLInsertNull() throws Exception {
        String name = "nullone";
        database.execSQL("insert into exectable (first_column, name) values (?,?);", new String[]{null, name});
        Cursor cursor = database.rawQuery("select * from exectable WHERE `name` = ?", new String[]{name});
        cursor.moveToFirst();
        int firstIndex = cursor.getColumnIndex("first_column");
        int nameIndex = cursor.getColumnIndex("name");
        assertThat(cursor.getString(nameIndex), equalTo(name));
        assertThat(cursor.getString(firstIndex), equalTo(null));
    }

    @Test(expected = Exception.class)
    public void testExecSQLInsertNullShouldBeException() throws Exception {
        //this inserts null in android, but it when it happens it is likely an error.  H2 throws an exception.  So we'll make Robolectric expect an Exception so that the error can be found.
        database.delete("exectable", null, null);
        Cursor cursor = database.rawQuery("select * from exectable", null);
        cursor.moveToFirst();
        assertThat(cursor.getCount(), equalTo(0));
        database.execSQL("insert into exectable (first_column) values (?);", new String[]{});
        Cursor cursor2 = database.rawQuery("select * from exectable", new String[]{null});
        cursor.moveToFirst();
        assertThat(cursor2.getCount(), equalTo(1));
    }

    // AUTOINCREMENT keys must be distinct and non-zero across inserts.
    @Test
    public void testExecSQLAutoIncrementSQLite() throws Exception {
        database.execSQL("CREATE TABLE auto_table (id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(255));");
        ContentValues values = new ContentValues();
        values.put("name", "Chuck");
        long key = database.insert("auto_table", null, values);
        assertThat(key, not(equalTo(0L)));
        long key2 = database.insert("auto_table", null, values);
        assertThat(key2, not(equalTo(key)));
    }

    // Using a closed database is an IllegalStateException.
    @Test(expected = IllegalStateException.class)
    public void testClose() throws Exception {
        database.close();
        database.execSQL("INSERT INTO table_name (id, name) VALUES(1234, 'Chuck');");
    }

    @Test
    public void testIsOpen() throws Exception {
        assertThat(database.isOpen(), equalTo(true));
        database.close();
        assertThat(database.isOpen(), equalTo(false));
    }

    // Values beyond 32-bit range must survive the round trip (cursor.getLong).
    @Test
    public void shouldStoreGreatBigHonkinIntegersCorrectly() throws Exception {
        database.execSQL("INSERT INTO table_name(big_int) VALUES(1234567890123456789);");
        Cursor cursor = database.query("table_name", new String[]{"big_int"}, null, null, null, null, null);
        cursor.moveToFirst();
        assertEquals(1234567890123456789L, cursor.getLong(0));
    }

    // Tracks the shadow's transaction-success flag through a committed transaction.
    @Test
    public void testSuccessTransaction() throws SQLException {
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.beginTransaction();
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.execSQL("INSERT INTO table_name (id, name) VALUES(1234, 'Chuck');");
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.setTransactionSuccessful();
        assertThat(shDatabase.isTransactionSuccess(), equalTo(true));
        database.endTransaction();
        // Flag resets once the transaction ends.
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        Statement statement = shadowOf(database).getConnection().createStatement();
        ResultSet resultSet = statement.executeQuery("SELECT COUNT(*) FROM table_name");
        assertThat(resultSet.next(), equalTo(true));
        assertThat(resultSet.getInt(1), equalTo(1));
    }

    // Without setTransactionSuccessful(), endTransaction() rolls the insert back.
    @Test
    public void testFailureTransaction() throws Exception {
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.beginTransaction();
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.execSQL("INSERT INTO table_name (id, name) VALUES(1234, 'Chuck');");
        Statement statement = shadowOf(database).getConnection().createStatement();
        final String select = "SELECT COUNT(*) FROM table_name";
        // Mid-transaction the row is visible on this connection...
        ResultSet rs = statement.executeQuery(select);
        assertThat(rs.next(), equalTo(true));
        assertThat(rs.getInt(1), equalTo(1));
        rs.close();
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
        database.endTransaction();
        // ...but gone after the implicit rollback.
        statement = shadowOf(database).getConnection().createStatement();
        rs = statement.executeQuery(select);
        assertThat(rs.next(), equalTo(true));
        assertThat(rs.getInt(1), equalTo(0));
        assertThat(shDatabase.isTransactionSuccess(), equalTo(false));
    }

    @Test
    public void testTransactionAlreadySuccessful() {
        database.beginTransaction();
        database.setTransactionSuccessful();
        try {
            // Marking success twice is an error; the (ungrammatical) message below
            // is what the shadow actually throws, so the test pins it verbatim.
            database.setTransactionSuccessful();
            fail("didn't receive the expected IllegalStateException");
        } catch (IllegalStateException e) {
            assertThat(e.getMessage(), equalTo("transaction already successfully"));
        }
    }

    @Test
    public void testInTransaction() throws Exception {
        assertThat( database.inTransaction(), equalTo(false) );
        database.beginTransaction();
        assertThat( database.inTransaction(), equalTo(true) );
        database.endTransaction();
        assertThat( database.inTransaction(), equalTo(false) );
    }

    // ---- fixture helpers -------------------------------------------------

    protected long addChuck() {
        return addPerson(1234L, "Chuck");
    }

    protected long addJulie() {
        return addPerson(1235L, "Julie");
    }

    protected long addPerson(long id, String name) {
        ContentValues values = new ContentValues();
        values.put("id", id);
        values.put("name", name);
        return database.insert("table_name", null, values);
    }

    // Returns the number of rows updated.
    protected int updateName(long id, String name) {
        ContentValues values = new ContentValues();
        values.put("name", name);
        return database.update("table_name", values, "id=" + id, null);
    }

    // Updates every row's name; returns the number of rows updated.
    protected int updateName(String name) {
        ContentValues values = new ContentValues();
        values.put("name", name);
        return database.update("table_name", values, null, null);
    }

    // Asserts the cursor's current row is (id, name) in columns 0 and 1.
    protected void assertIdAndName(Cursor cursor, long id, String name) {
        long idValueFromDatabase;
        String stringValueFromDatabase;
        idValueFromDatabase = cursor.getLong(0);
        stringValueFromDatabase = cursor.getString(1);
        assertThat(idValueFromDatabase, equalTo(id));
        assertThat(stringValueFromDatabase, equalTo(name));
    }

    protected void assertEmptyDatabase() {
        Cursor cursor = database.query("table_name", new String[]{"id", "name"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(false));
        assertThat(cursor.isClosed(), equalTo(false));
        assertThat(cursor.getCount(), equalTo(0));
    }

    protected void assertNonEmptyDatabase() {
        Cursor cursor = database.query("table_name", new String[]{"id", "name"}, null, null, null, null, null);
        assertThat(cursor.moveToFirst(), equalTo(true));
        assertThat(cursor.getCount(), not(equalTo(0)));
    }
}
/*
 * Autosleep
 * Copyright (C) 2016 Orange
 * Authors: Benjamin Einaudi   benjamin.einaudi@orange.com
 *          Arnaud Ruffin      arnaud.ruffin@orange.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.cloudfoundry.autosleep.access.cloudfoundry;

import lombok.extern.slf4j.Slf4j;
import org.cloudfoundry.autosleep.access.cloudfoundry.model.ApplicationActivity;
import org.cloudfoundry.autosleep.access.cloudfoundry.model.ApplicationIdentity;
import org.cloudfoundry.autosleep.access.dao.model.ApplicationInfo;
import org.cloudfoundry.autosleep.config.Config;
import org.cloudfoundry.autosleep.config.Config.CloudFoundryAppState;
import org.cloudfoundry.client.CloudFoundryClient;
import org.cloudfoundry.client.v2.ClientV2Exception;
import org.cloudfoundry.client.v2.applications.*;
import org.cloudfoundry.client.v2.domains.GetDomainRequest;
import org.cloudfoundry.client.v2.domains.GetDomainResponse;
import org.cloudfoundry.client.v2.events.EventEntity;
import org.cloudfoundry.client.v2.events.EventResource;
import org.cloudfoundry.client.v2.events.ListEventsRequest;
import org.cloudfoundry.client.v2.events.ListEventsResponse;
import org.cloudfoundry.client.v2.organizations.GetOrganizationRequest;
import org.cloudfoundry.client.v2.organizations.GetOrganizationResponse;
import org.cloudfoundry.client.v2.routes.*;
import org.cloudfoundry.client.v2.servicebindings.CreateServiceBindingRequest;
import org.cloudfoundry.client.v2.servicebindings.DeleteServiceBindingRequest;
import org.cloudfoundry.client.v2.serviceinstances.BindServiceInstanceRouteRequest;
import org.cloudfoundry.doppler.DopplerClient;
import org.cloudfoundry.doppler.LogMessage;
import org.cloudfoundry.doppler.RecentLogsRequest;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * Implementation of {@code CloudFoundryApiService} on top of the cf-java-client
 * v2 API and the Doppler log endpoint.
 *
 * Reactive calls are either blocked directly with {@link Config#CF_API_TIMEOUT},
 * or fanned out through a {@link CountDownLatch}-driven {@link BaseSubscriber}
 * and joined in {@link #waitForResult}. Runtime failures from the client are
 * wrapped in the checked {@code CloudFoundryException}.
 */
@Slf4j
@Service
public class CloudFoundryApi implements CloudFoundryApiService {

    /**
     * Minimal Reactive Streams subscriber: requests everything, forwards each
     * element to {@code resultConsumer}, records errors in {@code errorConsumer},
     * and counts down the shared latch on completion or error.
     */
    private static class BaseSubscriber<T> implements Subscriber<T> {

        Consumer<Throwable> errorConsumer;

        CountDownLatch latch;

        Consumer<T> resultConsumer;

        public BaseSubscriber(CountDownLatch latch, Consumer<Throwable> errorConsumer,
                              Consumer<T> resultConsumer) {
            this.latch = latch;
            this.resultConsumer = resultConsumer;
            this.errorConsumer = errorConsumer;
        }

        @Override
        public void onComplete() {
            latch.countDown();
        }

        @Override
        public void onError(Throwable throwable) {
            if (errorConsumer != null) {
                errorConsumer.accept(throwable);
            }
            // An errored publisher emits no onComplete, so release the latch here too.
            latch.countDown();
        }

        @Override
        public void onNext(T result) {
            if (resultConsumer != null) {
                resultConsumer.accept(result);
            }
        }

        @Override
        public void onSubscribe(Subscription subscription) {
            subscription.request(Long.MAX_VALUE);
        }
    }

    // Cloud Controller error codes treated as "no instances" rather than failures.
    static final int CF_INSTANCES_ERROR = 220_001;

    static final int CF_STAGING_NOT_FINISHED = 170_002;

    static final int CF_ORGANIZATION_NOT_FOUND = 30_003;

    @Autowired
    private CloudFoundryClient cfClient;

    @Autowired
    private DopplerClient dopplerClient;

    /**
     * Fires one reactive call per element of {@code objectsToBind} and waits for all
     * of them to complete (or any to fail) within the API timeout.
     *
     * @param objectsToBind items to bind, one call each
     * @param caller        maps an item to the Mono performing its bind
     * @throws CloudFoundryException if any call fails, or the wait times out / is interrupted
     */
    private <T, U> void bind(List<T> objectsToBind, Function<T, Mono<U>> caller)
            throws CloudFoundryException {
        log.debug("bind - {} objects", objectsToBind.size());
        final CountDownLatch latch = new CountDownLatch(objectsToBind.size());
        final AtomicReference<Throwable> errorEncountered = new AtomicReference<>(null);
        final Subscriber<U> subscriber = new BaseSubscriber<>(latch, errorEncountered::set, null);
        objectsToBind.forEach(objectToBind -> caller.apply(objectToBind).subscribe(subscriber));
        waitForResult(latch, errorEncountered, null);
    }

    /** Binds every given application to the service instance. */
    @Override
    public void bindApplications(String serviceInstanceId, List<ApplicationIdentity> applications)
            throws CloudFoundryException {
        bind(applications,
                application -> cfClient.serviceBindingsV2()
                        .create(
                                CreateServiceBindingRequest
                                        .builder()
                                        .applicationId(application.getGuid())
                                        .serviceInstanceId(serviceInstanceId)
                                        .build()));
    }

    /** Binds every given route to the (route-service) service instance. */
    public void bindRoutes(String serviceInstanceId, List<String> routeIds)
            throws CloudFoundryException {
        bind(routeIds,
                routeId -> cfClient.serviceInstances()
                        .bindRoute(
                                BindServiceInstanceRouteRequest.builder()
                                        .serviceInstanceId(serviceInstanceId)
                                        .routeId(routeId)
                                        .build()));
    }

    /** Converts a CF event resource to the DAO event model; null-safe. */
    private ApplicationInfo.DiagnosticInfo.ApplicationEvent buildAppEvent(EventResource event) {
        if (event == null) {
            return null;
        } else {
            EventEntity cfEvent = event.getEntity();
            return ApplicationInfo.DiagnosticInfo.ApplicationEvent.builder()
                    .actee(cfEvent.getActee())
                    .actor(cfEvent.getActor())
                    .name(cfEvent.getType())
                    .timestamp(Instant.parse(cfEvent.getTimestamp()).toEpochMilli())
                    .type(cfEvent.getType())
                    .build();
        }
    }

    /** Converts a Doppler log message to the DAO log model; null-safe. */
    private ApplicationInfo.DiagnosticInfo.ApplicationLog buildAppLog(LogMessage logMessage) {
        return logMessage == null ? null :
                ApplicationInfo.DiagnosticInfo.ApplicationLog.builder()
                        .message(logMessage.getMessage())
                        .timestampNanos(getLogMessageTimestampNanos(logMessage))
                        .messageType(getLogMessageType(logMessage))
                        .sourceId(logMessage.getSourceInstance())
                        .sourceName(logMessage.getSourceType())
                        .build();
    }

    /** Returns the message-type name, or null when the message carries none. */
    private String getLogMessageType(LogMessage logMessage) {
        return logMessage.getMessageType() != null ? logMessage.getMessageType().name() : null;
    }

    /**
     * Updates the application's state (STARTED/STOPPED) if it differs from the target.
     *
     * @return true if a state change was requested, false if the app was already in
     *         the target state
     * @throws CloudFoundryException wrapping any client failure
     */
    private boolean changeApplicationState(String applicationUuid, String targetState)
            throws CloudFoundryException {
        log.debug("changeApplicationState to {}", targetState);
        try {
            if (!targetState.equals(getApplicationState(applicationUuid))) {
                cfClient.applicationsV2()
                        .update(
                                UpdateApplicationRequest.builder()
                                        .applicationId(applicationUuid)
                                        .state(targetState)
                                        .build())
                        .block(Config.CF_API_TIMEOUT);
                return true;
            } else {
                log.warn("application {} already in state {}, nothing to do", applicationUuid,
                        targetState);
                return false;
            }
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /**
     * Gathers an application's current state, its most recent event, and its most
     * recent log line in three concurrent calls (app lookup, event list, recent logs).
     *
     * @throws CloudFoundryException if any of the three calls fails or times out
     */
    @Override
    public ApplicationActivity getApplicationActivity(String appUid) throws CloudFoundryException {
        log.debug("getApplicationActivity - {}", appUid);

        //We need to call for appState, lastlogs and lastEvents
        final CountDownLatch latch = new CountDownLatch(3);
        final AtomicReference<Throwable> errorEncountered = new AtomicReference<>(null);
        final AtomicReference<LogMessage> lastLogReference = new AtomicReference<>(null);
        final AtomicReference<ListEventsResponse> lastEventsReference = new AtomicReference<>(null);
        final AtomicReference<GetApplicationResponse> appReference = new AtomicReference<>(null);
        final AtomicReference<Instant> mostRecentLogInstant = new AtomicReference<>(null);

        cfClient.applicationsV2()
                .get(GetApplicationRequest.builder()
                        .applicationId(appUid)
                        .build())
                .subscribe(new BaseSubscriber<>(latch, errorEncountered::set, appReference::set));

        cfClient.events()
                .list(ListEventsRequest.builder()
                        .actee(appUid)
                        .build())
                .subscribe(new BaseSubscriber<>(latch, errorEncountered::set,
                        lastEventsReference::set));

        dopplerClient.recentLogs(RecentLogsRequest.builder()
                .applicationId(appUid)
                .build())
                .subscribe(new BaseSubscriber<>(
                        latch,
                        errorEncountered::set,
                        envelope -> {
                            //Sometimes observed missing log envelope timestamp, rather relying on
                            // log message timestamp
                            LogMessage logMessage = envelope.getLogMessage();
                            if (logMessage == null) {
                                log.warn("recent log without log message. Skipping. Envelope: {}",
                                        envelope);
                                return;
                            }
                            Long timestamp = logMessage.getTimestamp();
                            if (timestamp == null) {
                                log.warn("recent log without log message timestamp. Skipping. "
                                        + "Envelope: {}", envelope);
                                return;
                            }
                            //logs are not ordered, must find the most recent
                            Instant msgInstant = getInstantFromLogMessageTimestamp(timestamp);
                            if (mostRecentLogInstant.get() == null
                                    || mostRecentLogInstant.get().isBefore(msgInstant)) {
                                mostRecentLogInstant.set(msgInstant);
                                lastLogReference.set(envelope.getLogMessage());
                            }
                        }
                ));

        ApplicationActivity applicationActivity = waitForResult(latch, errorEncountered,
                () -> ApplicationActivity.builder()
                        .application(ApplicationIdentity.builder()
                                .guid(appUid)
                                .name(appReference.get().getEntity().getName())
                                .build())
                        .lastEvent(
                                lastEventsReference.get().getResources().isEmpty() ? null :
                                        buildAppEvent(lastEventsReference.get().getResources()
                                                .get(0)))
                        .lastLog(buildAppLog(lastLogReference.get()))
                        .state(appReference.get().getEntity().getState())
                        .build());
        log.debug("getApplicationActivity - {} - resulted in {}", appUid, applicationActivity);
        return applicationActivity;
    }

    /** Doppler log timestamps are epoch nanoseconds. */
    public Instant getInstantFromLogMessageTimestamp(Long logMessageTimestamp) {
        return Instant.ofEpochSecond(0, logMessageTimestamp);
    }

    /**
     * Fetches instance information for an application. "No instances found" and
     * "staging not finished" client errors are mapped to an empty response instead
     * of an error, since both simply mean the app is not running.
     */
    private Mono<ApplicationInstancesResponse> getApplicationInstances(String applicationUuid) {
        // BUGFIX: previous message said "listApplicationRoutes" (copy-paste from sibling method).
        log.debug("getApplicationInstances");
        return cfClient.applicationsV2()
                .instances(
                        ApplicationInstancesRequest.builder()
                                .applicationId(applicationUuid)
                                .build())
                .otherwise(throwable -> {
                    if (throwable instanceof org.cloudfoundry.client.v2.ClientV2Exception
                            && isNoInstanceFoundError(
                                    (org.cloudfoundry.client.v2.ClientV2Exception) throwable)) {
                        return Mono.just(ApplicationInstancesResponse.builder().build());
                    } else {
                        return Mono.error(throwable);
                    }
                });
    }

    /**
     * Returns the application's current state string (see {@code CloudFoundryAppState}).
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public String getApplicationState(String applicationUuid) throws CloudFoundryException {
        log.debug("getApplicationState");
        try {
            return this.cfClient
                    .applicationsV2()
                    .get(GetApplicationRequest.builder()
                            .applicationId(applicationUuid)
                            .build())
                    .block(Config.CF_API_TIMEOUT)
                    .getEntity().getState();
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /** Message timestamp in nanoseconds, defaulting to 0 when absent. */
    private Long getLogMessageTimestampNanos(LogMessage logMessage) {
        Long timestamp = logMessage.getTimestamp();
        return (timestamp != null ? timestamp : 0L);
    }

    /**
     * Resolves a route id to its full host name: {@code host + path + "." + domain}.
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public String getHost(String routeId) throws CloudFoundryException {
        try {
            log.debug("getHost");
            GetRouteResponse response = cfClient.routes()
                    .get(GetRouteRequest.builder()
                            .routeId(routeId)
                            .build())
                    .block(Config.CF_API_TIMEOUT);
            RouteEntity routeEntity = response.getEntity();
            String route = routeEntity.getHost() + routeEntity.getPath();
            log.debug("route = {}", route);
            String domainId = routeEntity.getDomainId();
            String domain = getDeprecatedDomain(domainId);
            return route + "." + domain;
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /** Looks up a domain name via the (deprecated) v2 domains endpoint. */
    private String getDeprecatedDomain(String domainId) {
        GetDomainResponse domainResponse = cfClient.domains()
                .get(GetDomainRequest.builder()
                        .domainId(domainId)
                        .build())
                .block(Config.CF_API_TIMEOUT);
        log.debug("domain = {}", domainResponse.getEntity());
        return domainResponse.getEntity().getName();
    }

    /**
     * Returns true if at least one instance of the application reports RUNNING.
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public boolean isAppRunning(String appUid) throws CloudFoundryException {
        log.debug("isAppRunning");
        try {
            return !getApplicationInstances(appUid)
                    .flatMap(response -> Flux.fromIterable(response.getInstances().values()))
                    .filter(instanceInfo -> "RUNNING".equals(instanceInfo.getState()))
                    .collect(ArrayList<ApplicationInstanceInfo>::new, ArrayList::add)
                    .block(Config.CF_API_TIMEOUT)
                    .isEmpty();
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    private boolean isNoInstanceFoundError(ClientV2Exception cloudfoundryException) {
        return cloudfoundryException.getCode() == CF_INSTANCES_ERROR
                || cloudfoundryException.getCode() == CF_STAGING_NOT_FINISHED;
    }

    /**
     * Lists applications in a space that have at least one instance (i.e. are not
     * STOPPED), excluding those whose name matches {@code excludeNames}.
     *
     * @param excludeNames optional name filter; null means "exclude nothing"
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public List<ApplicationIdentity> listAliveApplications(String spaceUuid, Pattern excludeNames)
            throws CloudFoundryException {
        log.debug("listAliveApplications from space_guid:" + spaceUuid);
        try {
            return Mono.just(spaceUuid)
                    // NOTE(review): the lambda ignores its spaceId parameter and closes
                    // over spaceUuid directly; behavior is identical.
                    .then(spaceId -> this.cfClient
                            .applicationsV2()
                            .list(ListApplicationsRequest.builder()
                                    .spaceId(spaceUuid)
                                    .build()))
                    .flatMap(listApplicationsResponse ->
                            Flux.fromIterable(listApplicationsResponse.getResources()))
                    //remove all filtered applications
                    .filter(applicationResource ->
                            excludeNames == null
                                    || !excludeNames.matcher(
                                            applicationResource.getEntity().getName()).matches())
                    //get instances
                    .flatMap(applicationResource ->
                            Mono.when(Mono.just(applicationResource),
                                    getApplicationInstances(
                                            applicationResource.getMetadata().getId())))
                    //filter the one that has no instances (ie. STOPPED)
                    .filter(tuple -> !tuple.getT2().getInstances().isEmpty())
                    .map(tuple -> ApplicationIdentity.builder()
                            .guid(tuple.getT1().getMetadata().getId())
                            .name(tuple.getT1().getEntity().getName())
                            .build())
                    .collect(ArrayList<ApplicationIdentity>::new, ArrayList::add)
                    .block(Config.CF_API_TIMEOUT);
        } catch (RuntimeException r) {
            throw new CloudFoundryException("failed listing applications from space_id: "
                    + spaceUuid, r);
        }
    }

    /**
     * Returns the route ids mapped to an application.
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public List<String> listApplicationRoutes(String applicationUuid) throws CloudFoundryException {
        log.debug("listApplicationRoutes");
        try {
            ListApplicationRoutesResponse response = cfClient.applicationsV2()
                    .listRoutes(
                            ListApplicationRoutesRequest.builder()
                                    .applicationId(applicationUuid)
                                    .build())
                    .block(Config.CF_API_TIMEOUT);
            return response.getResources().stream()
                    .map(routeResource -> routeResource.getMetadata().getId())
                    .collect(Collectors.toList());
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /**
     * Returns the application ids bound to a route.
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public List<String> listRouteApplications(String routeUuid) throws CloudFoundryException {
        log.debug("listRouteApplications");
        try {
            ListRouteApplicationsResponse response = cfClient.routes()
                    .listApplications(
                            ListRouteApplicationsRequest.builder()
                                    .routeId(routeUuid)
                                    .build())
                    .block(Config.CF_API_TIMEOUT);
            return response.getResources().stream()
                    .map(appResource -> appResource.getMetadata().getId())
                    .collect(Collectors.toList());
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /** @return true if a start was actually requested (app was not already STARTED) */
    @Override
    public boolean startApplication(String applicationUuid) throws CloudFoundryException {
        log.debug("startApplication");
        return changeApplicationState(applicationUuid, CloudFoundryAppState.STARTED);
    }

    /** @return true if a stop was actually requested (app was not already STOPPED) */
    @Override
    public boolean stopApplication(String applicationUuid) throws CloudFoundryException {
        log.debug("stopApplication");
        return changeApplicationState(applicationUuid, CloudFoundryAppState.STOPPED);
    }

    /**
     * Deletes a service binding.
     *
     * @throws CloudFoundryException wrapping any client failure
     */
    @Override
    public void unbind(String bindingId) throws CloudFoundryException {
        try {
            cfClient.serviceBindingsV2()
                    .delete(DeleteServiceBindingRequest.builder()
                            .serviceBindingId(bindingId)
                            .build())
                    .block(Config.CF_API_TIMEOUT);
        } catch (RuntimeException r) {
            throw new CloudFoundryException(r);
        }
    }

    /**
     * Waits for all subscribed calls to finish, then either rethrows the first
     * recorded error, or builds the result via {@code callback} (null when no
     * result is expected).
     *
     * @throws IllegalStateException if the latch times out
     * @throws CloudFoundryException if any call failed, or the wait was interrupted
     */
    private <T> T waitForResult(CountDownLatch latch, AtomicReference<Throwable> errorEncountered,
                                Supplier<T> callback) throws CloudFoundryException {
        try {
            if (!latch.await(Config.CF_API_TIMEOUT.getSeconds(), TimeUnit.SECONDS)) {
                throw new IllegalStateException("subscriber timed out");
            } else if (errorEncountered.get() != null) {
                throw new CloudFoundryException(errorEncountered.get());
            } else {
                if (callback != null) {
                    return callback.get();
                } else {
                    return null;
                }
            }
        } catch (InterruptedException e) {
            // BUGFIX: previously this logged e.getMessage() and returned null, which
            // both swallowed the interrupt (flag not restored) and handed callers a
            // null result they would NPE on. Restore the interrupt status and surface
            // the failure through the already-declared checked exception.
            Thread.currentThread().interrupt();
            throw new CloudFoundryException(e);
        }
    }

    /** Returns true if the organization guid exists (404-style code maps to false). */
    public boolean isValidOrganization(String organizationGuid) throws CloudFoundryException {
        GetOrganizationResponse response = cfClient.organizations()
                .get(GetOrganizationRequest.builder().organizationId(organizationGuid).build())
                .otherwise(throwable -> {
                    if (throwable instanceof org.cloudfoundry.client.v2.ClientV2Exception
                            && isNoOrganizationFoundError(
                                    (org.cloudfoundry.client.v2.ClientV2Exception) throwable)) {
                        // Unknown org: substitute an empty response (entity == null).
                        return Mono.just(GetOrganizationResponse.builder().build());
                    } else {
                        return Mono.error(throwable);
                    }
                }).block(Config.CF_API_TIMEOUT);
        return response.getEntity() != null;
    }

    private boolean isNoOrganizationFoundError(ClientV2Exception cloudfoundryException) {
        return cloudfoundryException.getCode() == CF_ORGANIZATION_NOT_FOUND;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.app.rm; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse; import 
org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.ClientRMProxy; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; /** * Registers/unregisters to RM and sends heartbeats to RM. */ public abstract class RMCommunicator extends AbstractService implements RMHeartbeatHandler { private static final Log LOG = LogFactory.getLog(RMContainerAllocator.class); private int rmPollInterval;//millis protected ApplicationId applicationId; private final AtomicBoolean stopped; protected Thread allocatorThread; @SuppressWarnings("rawtypes") protected EventHandler eventHandler; protected ApplicationMasterProtocol scheduler; private final ClientService clientService; protected int lastResponseID; private Resource maxContainerCapability; protected Map<ApplicationAccessType, String> applicationACLs; private volatile long lastHeartbeatTime; private ConcurrentLinkedQueue<Runnable> heartbeatCallbacks; private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); private final AppContext context; private Job job; // Has a signal (SIGTERM etc) been issued? 
protected volatile boolean isSignalled = false;
  private volatile boolean shouldUnregister = true;
  private boolean isApplicationMasterRegistered = false;

  public RMCommunicator(ClientService clientService, AppContext context) {
    super("RMCommunicator");
    this.clientService = clientService;
    this.context = context;
    this.eventHandler = context.getEventHandler();
    this.applicationId = context.getApplicationID();
    this.stopped = new AtomicBoolean(false);
    this.heartbeatCallbacks = new ConcurrentLinkedQueue<Runnable>();
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    super.serviceInit(conf);
    // Heartbeat interval between this AM and the RM, in milliseconds.
    rmPollInterval =
        conf.getInt(MRJobConfig.MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS,
            MRJobConfig.DEFAULT_MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS);
  }

  @Override
  protected void serviceStart() throws Exception {
    scheduler = createSchedulerProxy();
    JobID id = TypeConverter.fromYarn(this.applicationId);
    JobId jobId = TypeConverter.toYarn(id);
    job = context.getJob(jobId);
    // Register with the RM before starting the heartbeat loop.
    register();
    startAllocatorThread();
    super.serviceStart();
  }

  protected AppContext getContext() {
    return context;
  }

  protected Job getJob() {
    return job;
  }

  /**
   * Get the appProgress. Can be used only after this component is started.
   * @return the appProgress.
   */
  protected float getApplicationProgress() {
    // For now just a single job. In future when we have a DAG, we need an
    // aggregate progress.
    return this.job.getProgress();
  }

  /**
   * Registers this AM with the RM, publishing the tracking URL (when a
   * ClientService is available) and caching cluster limits and ACLs from
   * the registration response.
   *
   * @throws YarnRuntimeException wrapping any registration failure
   */
  protected void register() {
    //Register
    InetSocketAddress serviceAddr = null;
    if (clientService != null ) {
      serviceAddr = clientService.getBindAddress();
    }
    try {
      RegisterApplicationMasterRequest request =
        recordFactory.newRecordInstance(RegisterApplicationMasterRequest.class);
      if (serviceAddr != null) {
        request.setHost(serviceAddr.getHostName());
        request.setRpcPort(serviceAddr.getPort());
        request.setTrackingUrl(MRWebAppUtil
            .getAMWebappScheme(getConfig())
            + serviceAddr.getHostName() + ":" + clientService.getHttpPort());
      }
      RegisterApplicationMasterResponse response =
        scheduler.registerApplicationMaster(request);
      isApplicationMasterRegistered = true;
      maxContainerCapability = response.getMaximumResourceCapability();
      this.context.getClusterInfo().setMaxContainerCapability(
          maxContainerCapability);
      if (UserGroupInformation.isSecurityEnabled()) {
        setClientToAMToken(response.getClientToAMTokenMasterKey());
      }
      this.applicationACLs = response.getApplicationACLs();
      LOG.info("maxContainerCapability: " + maxContainerCapability.getMemory());
      String queue = response.getQueue();
      LOG.info("queue: " + queue);
      job.setQueueName(queue);
    } catch (Exception are) {
      LOG.error("Exception while registering", are);
      throw new YarnRuntimeException(are);
    }
  }

  private void setClientToAMToken(ByteBuffer clientToAMTokenMasterKey) {
    byte[] key = clientToAMTokenMasterKey.array();
    context.getClientToAMTokenSecretManager().setMasterKey(key);
  }

  /**
   * Unregisters from the RM. On failure, recomputes whether this AM attempt
   * is really the last one so that retry logic stays correct.
   */
  protected void unregister() {
    try {
      doUnregistration();
    } catch(Exception are) {
      LOG.error("Exception while unregistering ", are);
      // if unregistration failed, isLastAMRetry needs to be recalculated
      // to see whether AM really has the chance to retry
      RunningAppContext raContext = (RunningAppContext) context;
      raContext.computeIsLastAMRetry();
    }
  }

  /**
   * Maps the job's internal state to a final application status, then polls
   * finishApplicationMaster until the RM confirms the unregistration.
   */
  @VisibleForTesting
  protected void doUnregistration()
      throws YarnException, IOException, InterruptedException {
    FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
    JobImpl jobImpl = (JobImpl)job;
    if (jobImpl.getInternalState() == JobStateInternal.SUCCEEDED) {
      finishState = FinalApplicationStatus.SUCCEEDED;
    } else if (jobImpl.getInternalState() == JobStateInternal.KILLED
        || (jobImpl.getInternalState() == JobStateInternal.RUNNING && isSignalled)) {
      finishState = FinalApplicationStatus.KILLED;
    } else if (jobImpl.getInternalState() == JobStateInternal.FAILED
        || jobImpl.getInternalState() == JobStateInternal.ERROR) {
      finishState = FinalApplicationStatus.FAILED;
    }
    StringBuffer sb = new StringBuffer();
    for (String s : job.getDiagnostics()) {
      sb.append(s).append("\n");
    }
    LOG.info("Setting job diagnostics to " + sb.toString());
    String historyUrl =
        MRWebAppUtil.getApplicationWebURLOnJHSWithScheme(getConfig(),
            context.getApplicationID());
    LOG.info("History url is " + historyUrl);
    FinishApplicationMasterRequest request =
        FinishApplicationMasterRequest.newInstance(finishState,
            sb.toString(), historyUrl);
    while (true) {
      FinishApplicationMasterResponse response =
          scheduler.finishApplicationMaster(request);
      if (response.getIsUnregistered()) {
        // When excepting ClientService, other services are already stopped,
        // it is safe to let clients know the final states. ClientService
        // should wait for some time so clients have enough time to know the
        // final states.
        RunningAppContext raContext = (RunningAppContext) context;
        raContext.markSuccessfulUnregistration();
        break;
      }
      LOG.info("Waiting for application to be successfully unregistered.");
      Thread.sleep(rmPollInterval);
    }
  }

  protected Resource getMaxContainerCapability() {
    return maxContainerCapability;
  }

  @Override
  protected void serviceStop() throws Exception {
    if (stopped.getAndSet(true)) {
      // return if already stopped
      return;
    }
    if (allocatorThread != null) {
      allocatorThread.interrupt();
      try {
        allocatorThread.join();
      } catch (InterruptedException ie) {
        LOG.warn("InterruptedException while stopping", ie);
      }
    }
    if (isApplicationMasterRegistered && shouldUnregister) {
      unregister();
    }
    super.serviceStop();
  }

  /**
   * Starts the background thread that sleeps for rmPollInterval and then
   * heartbeats to the RM, running any queued heartbeat callbacks after each
   * successful heartbeat.
   */
  protected void startAllocatorThread() {
    allocatorThread = new Thread(new Runnable() {
      @Override
      public void run() {
        while (!stopped.get() && !Thread.currentThread().isInterrupted()) {
          try {
            // Fix: removed stray "[SAMAN]..." System.out.println debug output
            // left over from debugging; it bypassed the logging framework and
            // spammed stdout on every poll.
            Thread.sleep(rmPollInterval);
            try {
              heartbeat();
            } catch (YarnRuntimeException e) {
              LOG.error("Error communicating with RM: " + e.getMessage() , e);
              return;
            } catch (Exception e) {
              LOG.error("ERROR IN CONTACTING RM. ", e);
              continue;
              // TODO: for other exceptions
            }
            lastHeartbeatTime = context.getClock().getTime();
            executeHeartbeatCallbacks();
          } catch (InterruptedException e) {
            if (!stopped.get()) {
              LOG.warn("Allocated thread interrupted. Returning.");
            }
            return;
          }
        }
      }
    });
    allocatorThread.setName("RMCommunicator Allocator");
    allocatorThread.start();
  }

  protected ApplicationMasterProtocol createSchedulerProxy() {
    final Configuration conf = getConfig();
    try {
      return ClientRMProxy.createRMProxy(conf, ApplicationMasterProtocol.class);
    } catch (IOException e) {
      throw new YarnRuntimeException(e);
    }
  }

  protected abstract void heartbeat() throws Exception;

  /** Drains and runs all callbacks queued via runOnNextHeartbeat. */
  private void executeHeartbeatCallbacks() {
    Runnable callback = null;
    while ((callback = heartbeatCallbacks.poll()) != null) {
      callback.run();
    }
  }

  @Override
  public long getLastHeartbeatTime() {
    return lastHeartbeatTime;
  }

  @Override
  public void runOnNextHeartbeat(Runnable callback) {
    heartbeatCallbacks.add(callback);
  }

  public void setShouldUnregister(boolean shouldUnregister) {
    this.shouldUnregister = shouldUnregister;
    LOG.info("RMCommunicator notified that shouldUnregistered is: "
        + shouldUnregister);
  }

  public void setSignalled(boolean isSignalled) {
    this.isSignalled = isSignalled;
    LOG.info("RMCommunicator notified that iSignalled is: "
        + isSignalled);
  }

  @VisibleForTesting
  protected boolean isApplicationMasterRegistered() {
    return isApplicationMasterRegistered;
  }
}
/* * Copyright (c) 2005-2011 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.business; import java.sql.Date; import java.util.ArrayList; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.Iterator; import java.util.List; import java.util.Set; import junit.framework.Assert; import org.hibernate.HibernateException; import org.hibernate.Session; import org.joda.time.DateTime; import org.junit.After; import org.junit.Ignore; import org.junit.Test; import org.mifos.accounts.AccountIntegrationTestCase; import org.mifos.accounts.exceptions.AccountException; import org.mifos.accounts.fees.business.FeeBO; import org.mifos.accounts.fees.util.helpers.FeeCategory; import org.mifos.accounts.fees.util.helpers.FeePayment; import org.mifos.accounts.financial.business.FinancialTransactionBO; import org.mifos.accounts.loan.business.LoanBO; import org.mifos.accounts.loan.business.LoanTrxnDetailEntity; import org.mifos.accounts.util.helpers.AccountState; import org.mifos.accounts.util.helpers.PaymentData; import org.mifos.application.meeting.business.MeetingBO; import org.mifos.application.meeting.util.helpers.MeetingType; import org.mifos.application.meeting.util.helpers.RecurrenceType; import org.mifos.config.AccountingRules; import 
org.mifos.config.persistence.ConfigurationPersistence; import org.mifos.customers.center.business.CenterBO; import org.mifos.customers.persistence.CustomerPersistence; import org.mifos.customers.personnel.business.PersonnelBO; import org.mifos.dto.screen.TransactionHistoryDto; import org.mifos.framework.TestUtils; import org.mifos.framework.exceptions.PersistenceException; import org.mifos.framework.exceptions.SystemException; import org.mifos.framework.hibernate.helper.StaticHibernateUtil; import org.mifos.framework.util.helpers.IntegrationTestObjectMother; import org.mifos.framework.util.helpers.TestObjectFactory; import org.mifos.security.util.UserContext; public class AccountBOIntegrationTest extends AccountIntegrationTestCase { private static final double DELTA = 0.00000001; @After @Override public void tearDown() throws Exception { new ConfigurationPersistence().updateConfigurationKeyValueInteger("repaymentSchedulesIndependentOfMeetingIsEnabled", 0); } /* * When LSIM is turned on, back dated transactions should be allowed. */ @Test public void testIsTrxnDateValidWithLSIM() throws Exception { new ConfigurationPersistence().updateConfigurationKeyValueInteger("repaymentSchedulesIndependentOfMeetingIsEnabled", 1); DateTime transactionDate = new DateTime(); transactionDate = transactionDate.plusDays(10); java.util.Date trxnDate = transactionDate.toDate(); PersonnelBO loggedInUser = IntegrationTestObjectMother.testUser(); groupLoan.changeStatus(AccountState.LOAN_APPROVED, null, "status changed", loggedInUser); Assert.assertTrue(AccountingRules.isBackDatedTxnAllowed()); Date meetingDate = new CustomerPersistence().getLastMeetingDateForCustomer(groupLoan.getCustomer().getCustomerId()); Assert.assertTrue(groupLoan.isTrxnDateValid(trxnDate, meetingDate, false)); } /** * The name of this test, and some now-gone (and broken) exception-catching code, make it look like it was supposed * to test failure. But it doesn't (and I don't see a corresponding success test). 
*/
    // NOTE(review): despite the "Failure" name, this only exercises fee removal on
    // every fee attached to the loan — no failure path is asserted (see javadoc above).
    @Test
    public void testFailureRemoveFees() throws Exception {
        StaticHibernateUtil.getSessionTL();
        StaticHibernateUtil.startTransaction();
        UserContext uc = TestUtils.makeUser();
        Set<AccountFeesEntity> accountFeesEntitySet = groupLoan.getAccountFees();
        for (AccountFeesEntity accountFeesEntity : accountFeesEntitySet) {
            groupLoan.removeFeesAssociatedWithUpcomingAndAllKnownFutureInstallments(accountFeesEntity.getFees().getFeeId(), uc.getId());
        }
        StaticHibernateUtil.flushAndClearSession();
    }

    // Applies a single 88-unit payment to an approved, disbursed loan and verifies
    // that amount is reported as the last payment eligible for adjustment.
    @Test
    public void testSuccessGetLastPmntAmntToBeAdjusted() throws Exception {
        LoanBO loan = groupLoan;
        Date currentDate = new Date(System.currentTimeMillis());
        Date startDate = new Date(System.currentTimeMillis());
        loan.setAccountState(new AccountStateEntity(AccountState.LOAN_APPROVED));
        disburseLoan(loan, startDate);
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PaymentData firstPaymentData = TestObjectFactory
                .getLoanAccountPaymentData(accntActionDates, TestUtils.createMoney(88), null, loan.getPersonnel(),
                        "receiptNum", Short.valueOf("1"), currentDate, currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, firstPaymentData);
        TestObjectFactory.updateObject(loan);
        StaticHibernateUtil.flushAndClearSession();
        // the loan has to be reloaded from db so that the payment list will be
        // in desc order and the
        // last payment will be the first in the payment list
        Session session = StaticHibernateUtil.getSessionTL();
        loan = (LoanBO) session.get(LoanBO.class, loan.getAccountId());
        Assert.assertEquals(88.0, loan.getLastPmntAmntToBeAdjusted(), DELTA);
        groupLoan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
    }

    // Calls updateAccountActionDateEntity for every fee on the loan; no assertions —
    // the test only verifies the call completes without throwing.
    @Test
    public void testSuccessUpdateAccountActionDateEntity() {
        List<Short> installmentIdList;
        installmentIdList = getApplicableInstallmentIdsForRemoveFees(groupLoan);
        Set<AccountFeesEntity> accountFeesEntitySet = groupLoan.getAccountFees();
        Iterator<AccountFeesEntity> itr = accountFeesEntitySet.iterator();
        while (itr.hasNext()) {
            groupLoan.updateAccountActionDateEntity(installmentIdList, itr.next().getFees().getFeeId());
        }
    }

    // Disabled: expects exactly one fee on the loan and that updating deactivates it.
    @Ignore
    @Test
    public void testSuccessUpdateAccountFeesEntity() {
        Set<AccountFeesEntity> accountFeesEntitySet = groupLoan.getAccountFees();
        Assert.assertEquals(1, accountFeesEntitySet.size());
        Iterator<AccountFeesEntity> itr = accountFeesEntitySet.iterator();
        while (itr.hasNext()) {
            AccountFeesEntity accountFeesEntity = itr.next();
            groupLoan.updateAccountFeesEntity(accountFeesEntity.getFees().getFeeId());
            Assert.assertFalse(accountFeesEntity.isActive());
        }
    }

    // Applies a 700-unit payment and checks getLastPmntAmnt reflects it.
    // NOTE(review): the assertion message says 1272 but the expected value is 700 —
    // the message looks stale.
    @Test
    public void testGetLastLoanPmntAmnt() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PaymentData paymentData = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(700), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate,
                currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, paymentData);
        TestObjectFactory.updateObject(loan);
        StaticHibernateUtil.flushSession();
        Assert.assertEquals("The amount returned for the payment should have been 1272", 700.0, loan.getLastPmntAmnt());
        groupLoan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
    }

    // Applies two payments, adjusts the most recent one, and verifies the last
    // payment amount drops to 0 and the adjusted installment is marked unpaid again.
    @Test
    public void testLoanAdjustment() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        loan.setUserContext(TestUtils.makeUser());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.add(loan.getAccountActionDate(Short.valueOf("1")));
        PaymentData accountPaymentDataView = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(216), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate,
                currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, accountPaymentDataView);
        loan = IntegrationTestObjectMother.findLoanBySystemId(loan.getGlobalAccountNum());
        loan.setUserContext(TestUtils.makeUser());
        IntegrationTestObjectMother.applyAccountPayment(loan, TestObjectFactory.getLoanAccountPaymentData(null,
                TestUtils.createMoney(600), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate,
                currentDate));
        loan = IntegrationTestObjectMother.findLoanBySystemId(loan.getGlobalAccountNum());
        loan.setUserContext(TestUtils.makeUser());
        PersonnelBO loggedInUser = IntegrationTestObjectMother.testUser();
        loan.adjustPmnt("loan account has been adjusted by test code", loggedInUser);
        IntegrationTestObjectMother.saveLoanAccount(loan);
        Assert.assertEquals("The amount returned for the payment should have been 0", 0.0, loan.getLastPmntAmnt());
        LoanTrxnDetailEntity lastLoanTrxn = null;
        // Only the first transaction of the most recent payment is examined.
        for (AccountTrxnEntity accntTrxn : loan.findMostRecentPaymentByPaymentDate().getAccountTrxns()) {
            lastLoanTrxn = (LoanTrxnDetailEntity) accntTrxn;
            break;
        }
        AccountActionDateEntity installment = loan.getAccountActionDate(lastLoanTrxn.getInstallmentId());
        Assert.assertFalse("The installment adjusted should now be marked unpaid(due).", installment.isPaid());
    }

    // Adjusting a payment on a closed account must fail with the CannotAdjust key.
    // NOTE(review): if adjustPmnt does NOT throw, this test silently passes —
    // there is no Assert.fail() after the call.
    @Test
    public void testAdjustmentForClosedAccnt() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        loan.setUserContext(TestUtils.makeUser());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PaymentData accountPaymentDataView = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(712), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate,
                currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, accountPaymentDataView);
        loan.setAccountState(new AccountStateEntity(AccountState.LOAN_CLOSED_OBLIGATIONS_MET));
        TestObjectFactory.updateObject(loan);
        try {
            PersonnelBO loggedInUser = IntegrationTestObjectMother.testUser();
            loan.adjustPmnt("loan account has been adjusted by test code", loggedInUser);
        } catch (AccountException e) {
            Assert.assertEquals("exception.accounts.ApplicationException.CannotAdjust", e.getKey());
        }
    }

    // A zero-amount payment should round-trip through persistence as zero money
    // in the MFI currency, not null.
    @Test
    public void testRetrievalOfNullMonetaryValue() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        loan.setUserContext(TestUtils.makeUser());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PaymentData accountPaymentDataView = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(0), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate, currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, accountPaymentDataView);
        TestObjectFactory.updateObject(loan);
        StaticHibernateUtil.flushSession();
        loan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
        List<AccountPaymentEntity> payments = loan.getAccountPayments();
        Assert.assertEquals(1, payments.size());
        AccountPaymentEntity accntPmnt = payments.iterator().next();
        StaticHibernateUtil.flushSession();
        Assert.assertEquals("Account payment retrieved should be zero with currency MFI currency", TestUtils
                .createMoney(0), accntPmnt.getAmount());
    }

    // The transaction-history view must contain one entry per financial transaction,
    // in the same order as their ids.
    @Test
    public void testGetTransactionHistoryView() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        loan.setUserContext(TestUtils.makeUser());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PaymentData accountPaymentDataView = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(100), null, loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate,
                currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, accountPaymentDataView);
        loan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
        // Collect every financial-transaction id reachable from the loan's payments.
        List<Integer> ids = new ArrayList<Integer>();
        for (AccountPaymentEntity accountPaymentEntity : loan.getAccountPayments()) {
            for (AccountTrxnEntity accountTrxnEntity : accountPaymentEntity.getAccountTrxns()) {
                for (FinancialTransactionBO financialTransactionBO : accountTrxnEntity.getFinancialTransactions()) {
                    ids.add(financialTransactionBO.getTrxnId());
                }
            }
        }
        loan.setUserContext(TestUtils.makeUser());
        List<TransactionHistoryDto> trxnHistlist = loan.getTransactionHistoryView();
        Assert.assertNotNull("Account TrxnHistoryView list object should not be null", trxnHistlist);
        Assert.assertTrue("Account TrxnHistoryView list object Size should be greater than zero",
                trxnHistlist.size() > 0);
        Assert.assertEquals(ids.size(), trxnHistlist.size());
        int i = 0;
        for (TransactionHistoryDto transactionHistoryDto : trxnHistlist) {
            Assert.assertEquals(ids.get(i), transactionHistoryDto.getAccountTrxnId());
            i++;
        }
        StaticHibernateUtil.flushSession();
        groupLoan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
    }

    // A payment posted by a different personnel (id 2) must show that user's
    // display name as "posted by" in every history entry.
    @Test
    public void testGetTransactionHistoryViewByOtherUser() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        LoanBO loan = groupLoan;
        loan.setUserContext(TestUtils.makeUser());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        accntActionDates.addAll(loan.getAccountActionDates());
        PersonnelBO personnel = legacyPersonnelDao.getPersonnel(Short.valueOf("2"));
        PaymentData accountPaymentDataView = TestObjectFactory.getLoanAccountPaymentData(accntActionDates, TestUtils
                .createMoney(100), null, personnel, "receiptNum", Short.valueOf("1"), currentDate, currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, accountPaymentDataView);
        loan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
        loan.setUserContext(TestUtils.makeUser());
        List<TransactionHistoryDto> trxnHistlist = loan.getTransactionHistoryView();
        Assert.assertNotNull("Account TrxnHistoryView list object should not be null", trxnHistlist);
        Assert.assertTrue("Account TrxnHistoryView list object Size should be greater than zero",
                trxnHistlist.size() > 0);
        for (TransactionHistoryDto transactionHistoryDto : trxnHistlist) {
            Assert.assertEquals(transactionHistoryDto.getPostedBy(), personnel.getDisplayName());
        }
        StaticHibernateUtil.flushSession();
        groupLoan = TestObjectFactory.getObject(LoanBO.class, loan.getAccountId());
    }

    // Disabled: adds a one-time fee and expects exactly one periodic fee on reload.
    @Ignore
    @Test
    public void testGetPeriodicFeeList() throws PersistenceException {
        FeeBO oneTimeFee = TestObjectFactory.createOneTimeAmountFee("One Time Fee", FeeCategory.LOAN, "20",
                FeePayment.TIME_OF_DISBURSEMENT);
        AccountFeesEntity accountOneTimeFee = new AccountFeesEntity(groupLoan, oneTimeFee, new Double("1.0"));
        groupLoan.addAccountFees(accountOneTimeFee);
        legacyAccountDao.createOrUpdate(groupLoan);
        StaticHibernateUtil.flushSession();
        groupLoan = TestObjectFactory.getObject(LoanBO.class, groupLoan.getAccountId());
        Assert.assertEquals(1, groupLoan.getPeriodicFeeList().size());
    }

    // A transaction dated 10 days in the future is valid only when back-dated
    // (non-meeting-date) transactions are allowed by configuration.
    @Test
    public void testIsTrxnDateValid() throws Exception {
        Calendar calendar = new GregorianCalendar();
        // Added by rajender on 24th July as test case was not passing
        calendar.add(Calendar.DAY_OF_MONTH, 10);
        java.util.Date trxnDate = new Date(calendar.getTimeInMillis());
        Date meetingDate = new CustomerPersistence().getLastMeetingDateForCustomer(groupLoan.getCustomer().getCustomerId());
        if (AccountingRules.isBackDatedTxnAllowed()) {
            Assert.assertTrue(groupLoan.isTrxnDateValid(trxnDate, meetingDate, false));
        } else {
            Assert.assertFalse(groupLoan.isTrxnDateValid(trxnDate, meetingDate, false));
        }
    }

    // Deleting future installments should leave only one action date on the loan.
    @Test
    public void testDeleteFutureInstallments() throws HibernateException, SystemException, AccountException {
        StaticHibernateUtil.flushSession();
        groupLoan = TestObjectFactory.getObject(LoanBO.class, groupLoan.getAccountId());
        groupLoan.deleteFutureInstallments();
        StaticHibernateUtil.flushAndClearSession();
        groupLoan = TestObjectFactory.getObject(LoanBO.class, groupLoan.getAccountId());
        Assert.assertEquals(1, groupLoan.getAccountActionDates().size());
    }

    // Adds an account note, persists, reloads, and verifies the most recent note's
    // comment, dates, author, and owning account; only the first (latest) note is checked.
    @Test
    public void testUpdate() throws Exception {
        StaticHibernateUtil.flushSession();
        groupLoan = (LoanBO) StaticHibernateUtil.getSessionTL().get(LoanBO.class, groupLoan.getAccountId());
        groupLoan.setUserContext(TestUtils.makeUser());
        java.sql.Date currentDate = new java.sql.Date(System.currentTimeMillis());
        PersonnelBO personnelBO = legacyPersonnelDao.getPersonnel(TestUtils.makeUser().getId());
        AccountNotesEntity accountNotesEntity = new AccountNotesEntity(currentDate, "account updated", personnelBO,
                groupLoan);
        groupLoan.addAccountNotes(accountNotesEntity);
        TestObjectFactory.updateObject(groupLoan);
        StaticHibernateUtil.flushSession();
        groupLoan = (LoanBO) StaticHibernateUtil.getSessionTL().get(LoanBO.class, groupLoan.getAccountId());
        for (AccountNotesEntity accountNotes : groupLoan.getRecentAccountNotes()) {
            Assert.assertEquals("Last note added is account updated", "account updated", accountNotes.getComment());
            Assert.assertEquals(currentDate.toString(), accountNotes.getCommentDateStr());
            Assert.assertEquals(personnelBO.getPersonnelId(), accountNotes.getPersonnel().getPersonnelId());
            Assert.assertEquals(personnelBO.getDisplayName(), accountNotes.getPersonnel().getDisplayName());
            Assert.assertEquals(currentDate.toString(), accountNotes.getCommentDate().toString());
            Assert.assertEquals(groupLoan.getAccountId(), accountNotes.getAccount().getAccountId());
            Assert.assertNotNull(accountNotes.getCommentId());
            break;
        }
    }

    // Verifies that back-dating one installment yields exactly one "past" installment
    // for a weekly-fee center's customer account. (Method continues past this chunk.)
    @Test
    public void testGetPastInstallments() {
        MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
        CenterBO centerBO = TestObjectFactory.createWeeklyFeeCenter("Center_Active", meeting);
        StaticHibernateUtil.flushAndClearSession();
        centerBO = TestObjectFactory.getCenter(centerBO.getCustomerId());
        // Back-date the first installment by 4 days so exactly one installment counts as "past".
        for (AccountActionDateEntity actionDate : centerBO.getCustomerAccount().getAccountActionDates()) {
            actionDate.setActionDate(offSetCurrentDate(4));
            break;
        }
        List<AccountActionDateEntity> pastInstallments = centerBO.getCustomerAccount().getPastInstallments();
        Assert.assertNotNull(pastInstallments);
        Assert.assertEquals(1, pastInstallments.size());
        centerBO = null;
    }

    /** The customer account of a freshly created weekly-fee center should expose all of its installments. */
    @Test
    public void testGetAllInstallments() {
        MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
        CenterBO centerBO = TestObjectFactory.createWeeklyFeeCenter("Center_Active", meeting);
        StaticHibernateUtil.flushAndClearSession();
        centerBO = TestObjectFactory.getCenter(centerBO.getCustomerId());
        List<AccountActionDateEntity> allInstallments = centerBO.getCustomerAccount().getAllInstallments();
        Assert.assertNotNull(allInstallments);
        // presumably the typical weekly meeting schedule generates 10 installments — TODO confirm
        Assert.assertEquals(10, allInstallments.size());
        centerBO = null;
    }

    /** Applies two payments and then adjusts the latest one, exercising performance-history updates. */
    @Test
    public void testUpdatePerformanceHistoryOnAdjustment() throws Exception {
        Date currentDate = new Date(System.currentTimeMillis());
        List<AccountActionDateEntity> accntActionDates = new ArrayList<AccountActionDateEntity>();
        PaymentData paymentData1 = TestObjectFactory.getLoanAccountPaymentData(accntActionDates,
                TestUtils.createMoney(212), null, groupLoan.getPersonnel(), "receiptNum", Short.valueOf("1"),
                currentDate, currentDate);
        IntegrationTestObjectMother.applyAccountPayment(groupLoan, paymentData1);
        IntegrationTestObjectMother.saveLoanAccount(groupLoan);
        LoanBO loan = IntegrationTestObjectMother.findLoanBySystemId(groupLoan.getGlobalAccountNum());
        PaymentData paymentData2 = TestObjectFactory.getLoanAccountPaymentData(null, TestUtils.createMoney(600), null,
                loan.getPersonnel(), "receiptNum", Short.valueOf("1"), currentDate, currentDate);
        IntegrationTestObjectMother.applyAccountPayment(loan, paymentData2);
        // NOTE(review): the second payment is applied to `loan` but `groupLoan` is saved here —
        // both appear to refer to the same persistent account; verify this is intended.
        IntegrationTestObjectMother.saveLoanAccount(groupLoan);
        groupLoan = IntegrationTestObjectMother.findLoanBySystemId(groupLoan.getGlobalAccountNum());
        groupLoan.setUserContext(TestUtils.makeUser());
        PersonnelBO loggedInUser = IntegrationTestObjectMother.testUser();
        // Adjust (reverse) the most recent payment; this is the step that should
        // update the performance history being tested.
        groupLoan.adjustPmnt("loan account has been adjusted by test code", loggedInUser);
        IntegrationTestObjectMother.saveLoanAccount(groupLoan);
    }

    /** Verifies AccountBO.closedDate is defensively copied on both set and get. */
    @Test
    public void testAccountBOClosedDate() {
        AccountBO account = new AccountBO();
        java.util.Date originalDate = new java.util.Date();
        final long TEN_SECONDS = 10000;
        // verify that after the setter is called, changes to the object
        // passed to the setter do not affect the internal state
        java.util.Date mutatingDate1 = (java.util.Date) originalDate.clone();
        account.setClosedDate(mutatingDate1);
        mutatingDate1.setTime(System.currentTimeMillis() + TEN_SECONDS);
        Assert.assertEquals(account.getClosedDate(), originalDate);
        // verify that after the getter is called, changes to the object
        // returned by the getter do not affect the internal state
        java.util.Date originalDate2 = (java.util.Date) originalDate.clone();
        account.setClosedDate(originalDate2);
        java.util.Date mutatingDate2 = account.getClosedDate();
        mutatingDate2.setTime(System.currentTimeMillis() + TEN_SECONDS);
        Assert.assertEquals(account.getClosedDate(), originalDate);
    }

    /** getInstallmentDates must tolerate a zero-installment request without throwing. */
    @Test
    public void testGetInstalmentDates() throws Exception {
        AccountBO account = new AccountBO();
        MeetingBO meeting = new MeetingBO(RecurrenceType.DAILY, (short) 1, getDate("18/08/2005"),
                MeetingType.CUSTOMER_MEETING);
        /*
         * make sure we can handle case where the number of installments is zero
         */
        account.getInstallmentDates(meeting, (short) 0, (short) 0);
    }

    /** Global account number = office global num ("0567") followed by the zero-padded account id. */
    @Test
    public void testGenerateId() throws Exception {
        AccountBO account = new AccountBO(35);
        String officeGlobalNum = "0567";
        String globalAccountNum = account.generateId(officeGlobalNum);
        Assert.assertEquals("056700000000035", globalAccountNum);
    }

    // Collects the installment ids of all future installments plus the next due installment.
    private List<Short> getApplicableInstallmentIdsForRemoveFees(final AccountBO account) {
        List<Short> installmentIdList = new ArrayList<Short>();
        for (AccountActionDateEntity accountActionDateEntity : account.getApplicableIdsForFutureInstallments()) {
            installmentIdList.add(accountActionDateEntity.getInstallmentId());
        }
        installmentIdList.add(account.getDetailsOfNextInstallment().getInstallmentId());
        return installmentIdList;
    }

    // Disburses the loan and clears the Hibernate session so subsequent reads hit the DB.
    private void disburseLoan(final LoanBO loan, final Date startDate) throws Exception {
        loan.disburseLoan("receiptNum", startDate, Short.valueOf("1"), loan.getPersonnel(), startDate,
                Short.valueOf("1"));
        StaticHibernateUtil.flushAndClearSession();
    }

    // Returns today minus noOfDays as a java.sql.Date (time-of-day truncated to midnight).
    private java.sql.Date offSetCurrentDate(final int noOfDays) {
        Calendar currentDateCalendar = new GregorianCalendar();
        int year = currentDateCalendar.get(Calendar.YEAR);
        int month = currentDateCalendar.get(Calendar.MONTH);
        int day = currentDateCalendar.get(Calendar.DAY_OF_MONTH);
        currentDateCalendar = new GregorianCalendar(year, month, day - noOfDays);
        return new java.sql.Date(currentDateCalendar.getTimeInMillis());
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.sumBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase public class SumBucketIT extends ESIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; static int 
numDocs; static int interval; static int minRandomValue; static int maxRandomValue; static int numValueBuckets; static long[] valueCounts; @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx") .setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); interval = randomIntBetween(2, 5); minRandomValue = 0; maxRandomValue = 20; numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1; valueCounts = new long[numValueBuckets]; List<IndexRequestBuilder> builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add(client().prepareIndex("idx").setSource( jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) .endObject())); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); } indexRandom(true, builders); ensureSearchable(); } public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .addAggregation(sumBucket("sum_bucket", "histo>_count")).get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); double sum = 0; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); sum += bucket.getDocCount(); } InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_bucket")); assertThat(sumBucketValue.value(), equalTo(sum)); } public void testDocCountAsSubAgg() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("tag") .order(BucketOrder.key(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .subAggregation(sumBucket("sum_bucket", "histo>_count"))).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<? extends Terms.Bucket> termsBuckets = terms.getBuckets(); assertThat(termsBuckets.size(), equalTo(interval)); for (int i = 0; i < interval; ++i) { Terms.Bucket termsBucket = termsBuckets.get(i); assertThat(termsBucket, notNullValue()); assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); Histogram histo = termsBucket.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); double sum = 0; for (int j = 0; j < numValueBuckets; ++j) { Histogram.Bucket bucket = buckets.get(j); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); sum += bucket.getDocCount(); } InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_bucket")); assertThat(sumBucketValue.value(), equalTo(sum)); } } public void testMetricTopLevel() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(sumBucket("sum_bucket", "terms>sum")).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<? extends Terms.Bucket> buckets = terms.getBuckets(); assertThat(buckets.size(), equalTo(interval)); double bucketSum = 0; for (int i = 0; i < interval; ++i) { Terms.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval))); assertThat(bucket.getDocCount(), greaterThan(0L)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); bucketSum += sum.value(); } InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_bucket")); assertThat(sumBucketValue.value(), equalTo(bucketSum)); } public void testMetricAsSubAgg() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("tag") .order(BucketOrder.key(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) 
.extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sumBucket("sum_bucket", "histo>sum"))).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<? extends Terms.Bucket> termsBuckets = terms.getBuckets(); assertThat(termsBuckets.size(), equalTo(interval)); for (int i = 0; i < interval; ++i) { Terms.Bucket termsBucket = termsBuckets.get(i); assertThat(termsBucket, notNullValue()); assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); Histogram histo = termsBucket.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); double bucketSum = 0; for (int j = 0; j < numValueBuckets; ++j) { Histogram.Bucket bucket = buckets.get(j); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); if (bucket.getDocCount() != 0) { Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); bucketSum += sum.value(); } } InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_bucket")); assertThat(sumBucketValue.value(), equalTo(bucketSum)); } } public void testMetricAsSubAggWithInsertZeros() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("tag") .order(BucketOrder.key(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sumBucket("sum_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .get(); assertSearchResponse(response); 
Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<? extends Terms.Bucket> termsBuckets = terms.getBuckets(); assertThat(termsBuckets.size(), equalTo(interval)); for (int i = 0; i < interval; ++i) { Terms.Bucket termsBucket = termsBuckets.get(i); assertThat(termsBucket, notNullValue()); assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); Histogram histo = termsBucket.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); double bucketSum = 0; for (int j = 0; j < numValueBuckets; ++j) { Histogram.Bucket bucket = buckets.get(j); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); bucketSum += sum.value(); } InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_bucket")); assertThat(sumBucketValue.value(), equalTo(bucketSum)); } } public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(sumBucket("sum_bucket", "terms>sum")).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<? 
        extends Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(0));
        // With no buckets to sum over, the sum_bucket pipeline aggregation reports 0.0.
        InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket");
        assertThat(sumBucketValue, notNullValue());
        assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
        assertThat(sumBucketValue.value(), equalTo(0.0));
    }

    /**
     * Nested sum_bucket pipelines: an inner sum over each histogram's doc counts,
     * then an outer sum over the per-terms-bucket inner sums.
     */
    public void testNested() throws Exception {
        SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                terms("terms")
                    .field("tag")
                    .order(BucketOrder.key(true))
                    .subAggregation(
                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                            .extendedBounds(minRandomValue, maxRandomValue))
                    .subAggregation(sumBucket("sum_histo_bucket", "histo>_count")))
            .addAggregation(sumBucket("sum_terms_bucket", "terms>sum_histo_bucket")).get();

        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));

        double aggTermsSum = 0;
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));

            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<?
extends Bucket> buckets = histo.getBuckets(); double aggHistoSum = 0; for (int j = 0; j < numValueBuckets; ++j) { Histogram.Bucket bucket = buckets.get(j); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); aggHistoSum += bucket.getDocCount(); } InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_histo_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_histo_bucket")); assertThat(sumBucketValue.value(), equalTo(aggHistoSum)); aggTermsSum += aggHistoSum; } InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_terms_bucket"); assertThat(sumBucketValue, notNullValue()); assertThat(sumBucketValue.getName(), equalTo("sum_terms_bucket")); assertThat(sumBucketValue.value(), equalTo(aggTermsSum)); } }
/*- * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * */ package org.nd4j.linalg.api.complex; import org.apache.commons.math3.util.FastMath; /** * Base complex float class * * @author Adam Gibson */ public abstract class BaseComplexFloat implements IComplexFloat { protected float real, imag; public BaseComplexFloat() {} public BaseComplexFloat(Float real, Float imag) { this.real = real; this.imag = imag; } public BaseComplexFloat(float real, float imag) { this.real = real; this.imag = imag; } public BaseComplexFloat(float real) { this(real, 0); } @Override public IComplexNumber eqc(IComplexNumber num) { double val = num.realComponent().doubleValue(); double imag = num.imaginaryComponent().doubleValue(); double otherVal = num.realComponent().doubleValue(); double otherImag = num.imaginaryComponent().doubleValue(); /* if (val == otherVal) return Nd4j.createComplexNumber(1, 0); else if (val != otherVal) return Nd4j.createComplexNumber(0, 0); else if (imag == otherImag) return Nd4j.createComplexNumber(1, 0); else return Nd4j.createComplexNumber(0, 0);*/ throw new UnsupportedOperationException(); } @Override public IComplexNumber neqc(IComplexNumber num) { double val = num.realComponent().doubleValue(); double imag = num.imaginaryComponent().doubleValue(); double otherVal = num.realComponent().doubleValue(); double otherImag = num.imaginaryComponent().doubleValue(); /* if (val != otherVal) return Nd4j.createComplexNumber(1, 
0); else if (val == otherVal) return Nd4j.createComplexNumber(0, 0); else if (imag != otherImag) return Nd4j.createComplexNumber(1, 0); else return Nd4j.createComplexNumber(0, 0);*/ throw new UnsupportedOperationException(); } @Override public IComplexNumber gt(IComplexNumber num) { double val = num.realComponent().doubleValue(); double imag = num.imaginaryComponent().doubleValue(); double otherVal = num.realComponent().doubleValue(); double otherImag = num.imaginaryComponent().doubleValue(); /* if (val > otherVal) return Nd4j.createComplexNumber(1, 0); else if (val < otherVal) return Nd4j.createComplexNumber(0, 0); else if (imag > otherImag) return Nd4j.createComplexNumber(1, 0); else return Nd4j.createComplexNumber(0, 0);*/ throw new UnsupportedOperationException(); } @Override public IComplexNumber lt(IComplexNumber num) { double val = num.realComponent().doubleValue(); double imag = num.imaginaryComponent().doubleValue(); double otherVal = num.realComponent().doubleValue(); double otherImag = num.imaginaryComponent().doubleValue(); /* if (val < otherVal) return Nd4j.createComplexNumber(1, 0); else if (val > otherVal) return Nd4j.createComplexNumber(0, 0); else if (imag < otherImag) return Nd4j.createComplexNumber(1, 0); else return Nd4j.createComplexNumber(0, 0);*/ throw new UnsupportedOperationException(); } @Override public IComplexNumber rsubi(IComplexNumber c) { return rsubi(c, this); } @Override public IComplexNumber set(IComplexNumber set) { return set(realComponent().floatValue(), imaginaryComponent().floatValue()); } @Override public IComplexNumber rsubi(IComplexNumber a, IComplexNumber result) { return result.set(a.sub(this)); } @Override public IComplexNumber rsub(IComplexNumber c) { return dup().rsubi(c); } @Override public IComplexNumber rsubi(Number a, IComplexNumber result) { return result.set(a.doubleValue() - realComponent().doubleValue(), imaginaryComponent()); } @Override public IComplexNumber rsubi(Number a) { return rsubi(a, this); } 
@Override
    public IComplexNumber rsub(Number r) {
        return dup().rsubi(r);
    }

    /** Reverse-divide into a fresh copy: returns c / this. */
    @Override
    public IComplexNumber rdiv(IComplexNumber c) {
        return dup().rdivi(c);
    }

    /** Reverse-divide: result = c / this. */
    @Override
    public IComplexNumber rdivi(IComplexNumber c, IComplexNumber result) {
        return result.set(c.div(this));
    }

    @Override
    public IComplexNumber rdivi(IComplexNumber c) {
        return rdivi(c, this);
    }

    /**
     * Reverse-divide a real number by this: result = v / this, computed as
     * v * conj(this) / |this|^2.
     */
    @Override
    public IComplexNumber rdivi(Number v, IComplexNumber result) {
        float d = realComponent().floatValue() * realComponent().floatValue()
                + imaginaryComponent().floatValue() * imaginaryComponent().floatValue();
        return result.set(v.floatValue() * realComponent().floatValue() / d,
                -v.floatValue() * imaginaryComponent().floatValue() / d);
    }

    @Override
    public IComplexNumber rdivi(Number v) {
        return rdivi(v, this);
    }

    @Override
    public IComplexNumber rdiv(Number v) {
        return dup().rdivi(v);
    }

    /**
     * Convert to a double
     *
     * @return this complex number as a double
     */
    @Override
    public IComplexDouble asDouble() {
        //return Nd4j.createDouble(realComponent(), imaginaryComponent());
        throw new UnsupportedOperationException();
    }

    /**
     * Convert to a float
     *
     * @return this complex number as a float
     */
    @Override
    public IComplexFloat asFloat() {
        return this;
    }

    /** Conjugate in place: negates the imaginary component. */
    @Override
    public IComplexFloat conji() {
        set(realComponent(), -imaginaryComponent());
        return this;
    }

    @Override
    public IComplexNumber conj() {
        return dup().conji();
    }

    /** Sets both components from the given (possibly boxed) numbers. */
    @Override
    public IComplexNumber set(Number real, Number imag) {
        this.real = real.floatValue();
        this.imag = imag.floatValue();
        return this;
    }

    /** Copy factory — unimplemented, always throws. */
    @Override
    public IComplexNumber copy(IComplexNumber other) {
        // return Nd4j.createFloat(other.realComponent().floatValue(), other.imaginaryComponent().floatValue());
        throw new UnsupportedOperationException();
    }

    /**
     * Add two complex numbers, storing this + c into {@code result}.
     *
     * @param c the addend
     * @param result the destination
     */
    @Override
    public IComplexNumber addi(IComplexNumber c, IComplexNumber result) {
        // BUG FIX: previously summed result's (not this number's) components with c,
        // which was only correct in the result == this case; now consistent with
        // subi/muli/divi, which all combine this with c.
        return result.set(realComponent().floatValue() + c.realComponent().floatValue(),
                imaginaryComponent().floatValue() + c.imaginaryComponent().floatValue());
    }

    /**
     * Add two complex numbers in-place storing the result in this.
     *
     * @param c the addend
     */
    @Override
    public IComplexNumber addi(IComplexNumber c) {
        return addi(c, this);
    }

    /**
     * Add two complex numbers, returning a new instance.
     *
     * @param c the addend
     */
    @Override
    public IComplexNumber add(IComplexNumber c) {
        return dup().addi(c);
    }

    /** Add a real number: result = this + a (imaginary part unchanged). */
    @Override
    public IComplexNumber addi(Number a, IComplexNumber result) {
        return result.set(realComponent().floatValue() + a.floatValue(), imaginaryComponent().floatValue());
    }

    @Override
    public IComplexNumber addi(Number c) {
        return addi(c, this);
    }

    @Override
    public IComplexNumber add(Number c) {
        return dup().addi(c);
    }

    /** Subtract: result = this - c. */
    @Override
    public IComplexNumber subi(IComplexNumber c, IComplexNumber result) {
        return result.set(realComponent().floatValue() - c.realComponent().floatValue(),
                imaginaryComponent().floatValue() - c.imaginaryComponent().floatValue());
    }

    @Override
    public IComplexNumber subi(IComplexNumber c) {
        return subi(c, this);
    }

    /**
     * Subtract two complex numbers, returning a new instance.
     *
     * @param c the subtrahend
     */
    @Override
    public IComplexNumber sub(IComplexNumber c) {
        return dup().subi(c);
    }

    /** Subtract a real number: result = this - a (imaginary part unchanged). */
    @Override
    public IComplexNumber subi(Number a, IComplexNumber result) {
        return result.set(realComponent().floatValue() - a.floatValue(), imaginaryComponent());
    }

    @Override
    public IComplexNumber subi(Number a) {
        return subi(a, this);
    }

    @Override
    public IComplexNumber sub(Number r) {
        return dup().subi(r);
    }

    /**
     * Multiply two complex numbers, inplace
     *
     * @param c the multiplier
     * @param result the destination
     */
    @Override
    public IComplexNumber muli(IComplexNumber c, IComplexNumber result) {
        float newR = realComponent() *
c.realComponent().floatValue() - imaginaryComponent() * c.imaginaryComponent().floatValue(); float newI = realComponent() * c.imaginaryComponent().floatValue() + imaginaryComponent() * c.realComponent().floatValue(); return result.set(newR, newI); } @Override public IComplexNumber muli(IComplexNumber c) { return muli(c, this); } /** * Multiply two complex numbers * * @param c */ @Override public IComplexNumber mul(IComplexNumber c) { return dup().muli(c); } @Override public IComplexNumber mul(Number v) { return dup().muli(v); } @Override public IComplexNumber muli(Number v, IComplexNumber result) { return result.set(realComponent().floatValue() * v.floatValue(), imaginaryComponent().floatValue() * v.floatValue()); } @Override public IComplexNumber muli(Number v) { return muli(v, this); } @Override public IComplexNumber exp() { IComplexNumber result = dup(); double realExp = FastMath.exp(realComponent()); return result.set(realExp * FastMath.cos(imaginaryComponent()), realExp * FastMath.sin(imaginaryComponent())); } @Override public IComplexNumber powi(IComplexNumber c, IComplexNumber result) { IComplexNumber eval = log().muli(c).exp(); result.set(eval.realComponent(), eval.imaginaryComponent()); return result; } @Override public IComplexNumber pow(Number v) { return dup().powi(v); } @Override public IComplexNumber pow(IComplexNumber c) { return dup().powi(c); } @Override public IComplexNumber powi(IComplexNumber c) { return dup().powi(c, this); } @Override public IComplexNumber powi(Number v) { return dup().powi(v, this); } @Override public IComplexNumber powi(Number v, IComplexNumber result) { IComplexNumber eval = log().muli(v).exp(); result.set(eval.realComponent(), eval.imaginaryComponent()); return result; } /** * Divide two complex numbers * * @param c */ @Override public IComplexNumber div(IComplexNumber c) { return dup().divi(c); } /** * Divide two complex numbers, in-place * * @param c * @param result */ @Override public IComplexNumber divi(IComplexNumber 
c, IComplexNumber result) {
        // Standard complex division: this / c = this * conj(c) / |c|^2.
        float d = c.realComponent().floatValue() * c.realComponent().floatValue()
                + c.imaginaryComponent().floatValue() * c.imaginaryComponent().floatValue();
        float newR = (realComponent() * c.realComponent().floatValue()
                + imaginaryComponent() * c.imaginaryComponent().floatValue()) / d;
        float newI = (imaginaryComponent() * c.realComponent().floatValue()
                - realComponent() * c.imaginaryComponent().floatValue()) / d;
        return result.set(newR, newI);
    }

    @Override
    public IComplexNumber divi(IComplexNumber c) {
        return divi(c, this);
    }

    // Divide both components by a real scalar, storing into result.
    @Override
    public IComplexNumber divi(Number v, IComplexNumber result) {
        return result.set(realComponent().floatValue() / v.floatValue(),
                imaginaryComponent().floatValue() / v.floatValue());
    }

    @Override
    public IComplexNumber divi(Number v) {
        return divi(v, this);
    }

    @Override
    public IComplexNumber div(Number v) {
        return dup().divi(v);
    }

    // NOTE(review): eq() and ne() both unconditionally return false — ne() in particular
    // looks suspicious (one would expect ne == !eq); confirm these are intentional stubs.
    @Override
    public boolean eq(IComplexNumber c) {
        return false;
    }

    @Override
    public boolean ne(IComplexNumber c) {
        return false;
    }

    @Override
    public Float realComponent() {
        return real;
    }

    @Override
    public Float imaginaryComponent() {
        return imag;
    }

    // In-place scalar division (primitive overload).
    @Override
    public IComplexFloat divi(float v) {
        this.real = real / v;
        this.imag = imag / v;
        return this;
    }

    @Override
    public IComplexNumber div(float v) {
        return dup().divi(v);
    }

    /**
     * Return the absolute value (modulus), sqrt(re^2 + im^2).
     */
    @Override
    public Float absoluteValue() {
        return (float) Math.sqrt(real * real + imag * imag);
    }

    /**
     * Returns the argument of a complex number.
*/ @Override public Float complexArgument() { return (float) Math.acos(realComponent() / absoluteValue()); } @Override public IComplexFloat invi() { float d = realComponent() * realComponent() + imaginaryComponent() * imaginaryComponent(); set(realComponent() / d, -imaginaryComponent() / d); return this; } @Override public IComplexNumber inv() { return dup().invi(); } @Override public IComplexNumber neg() { return dup().negi(); } @Override public IComplexFloat negi() { set(-realComponent(), -imaginaryComponent()); return this; } @Override public IComplexNumber log() { IComplexNumber result = dup(); float real = (float) result.realComponent(); float imaginary = (float) result.imaginaryComponent(); double modulus = Math.sqrt(real * real + imaginary * imaginary); double arg = Math.atan2(imaginary, real); return result.set(Math.log(modulus), arg); } @Override public IComplexFloat sqrt() { float a = absoluteValue(); float s2 = (float) Math.sqrt(2); float p = (float) Math.sqrt(a + realComponent()) / s2; float q = (float) Math.sqrt(a - realComponent()) / s2 * Math.signum(imaginaryComponent()); // return Nd4j.createFloat(p, q); throw new UnsupportedOperationException(); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof BaseComplexFloat)) return false; BaseComplexFloat that = (BaseComplexFloat) o; if (Float.compare(that.real, real) != 0) return false; if (Math.abs(that.imag - imag) > 1e-12) return false; return true; } public boolean isZero() { return real == 0; } @Override public boolean isReal() { return imaginaryComponent() == 0; } @Override public boolean isImag() { return realComponent() == 0; } @Override public int hashCode() { int result = (real != +0.0f ? Float.floatToIntBits(real) : 0); result = 31 * result + (imag != +0.0f ? Float.floatToIntBits(imag) : 0); return result; } @Override public String toString() { if (imag >= 0) { return real + " + " + imag + "i"; } else { return real + " - " + (-imag) + "i"; } } }
/* * Copyright 2015 herd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.herd.service.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.sql.DataSource; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.CannotGetJdbcConnectionException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.DriverManagerDataSource; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.util.Assert; import org.finra.herd.core.helper.ConfigurationHelper; import org.finra.herd.dao.JdbcDao; import org.finra.herd.dao.S3Dao; import org.finra.herd.model.api.xml.JdbcConnection; import org.finra.herd.model.api.xml.JdbcDatabaseType; import org.finra.herd.model.api.xml.JdbcExecutionRequest; import org.finra.herd.model.api.xml.JdbcExecutionResponse; import org.finra.herd.model.api.xml.JdbcStatement; import org.finra.herd.model.api.xml.JdbcStatementResultSet; import org.finra.herd.model.api.xml.JdbcStatementStatus; import org.finra.herd.model.api.xml.JdbcStatementType; import 
org.finra.herd.model.api.xml.S3PropertiesLocation;
import org.finra.herd.model.dto.ConfigurationValue;
import org.finra.herd.model.dto.S3FileTransferRequestParamsDto;
import org.finra.herd.service.JdbcService;
import org.finra.herd.service.helper.StorageDaoHelper;
import org.finra.herd.service.helper.VelocityHelper;

/**
 * Default implementation of {@link org.finra.herd.service.JdbcService} which uses Spring's JDBC wrapper framework to handle connections and transactions.
 */
@Service
public class JdbcServiceImpl implements JdbcService
{
    // Fully qualified JDBC driver class names for the supported database types.
    public static final String DRIVER_REDSHIFT = "com.amazon.redshift.jdbc41.Driver";

    public static final String DRIVER_POSTGRES = "org.postgresql.Driver";

    public static final String DRIVER_ORACLE = "oracle.jdbc.OracleDriver";

    @Autowired
    private JdbcDao jdbcDao;

    @Autowired
    private VelocityHelper velocityHelper;

    @Autowired
    private S3Dao s3Dao;

    @Autowired
    private ConfigurationHelper configurationHelper;

    @Autowired
    private StorageDaoHelper storageDaoHelper;

    /**
     * This implementation uses a {@link DriverManagerDataSource} and {@link DefaultTransactionDefinition}. It suspends the existing transaction and purposely
     * runs this logic in "no transaction" to ensure we don't create a connection that would potentially become idle while all JDBC tasks execute. If the
     * underlying connection pool has an abandoned connection timeout, it would reclaim and close the connection. Then when all the JDBC tasks below finish,
     * this transaction would try to commit and would generate a "commit failed" exception because the connection is already closed. This approach is fine since
     * we are not actually doing any "herd" DB operations below. When all the below JDBC operations are finished, nothing would happen here except the callers
     * transaction would pick up where it left off which would be needed to write workflow variables, etc.
     */
    @Override
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    public JdbcExecutionResponse executeJdbc(JdbcExecutionRequest jdbcExecutionRequest)
    {
        return executeJdbcImpl(jdbcExecutionRequest);
    }

    /**
     * This implementation uses a {@link DriverManagerDataSource}. Uses existing Spring ORM transaction.
     *
     * @param jdbcExecutionRequest JDBC execution request
     *
     * @return {@link JdbcExecutionResponse}
     */
    protected JdbcExecutionResponse executeJdbcImpl(JdbcExecutionRequest jdbcExecutionRequest)
    {
        validateJdbcExecutionRequest(jdbcExecutionRequest);

        // Optionally, get properties from S3. These act as Velocity variables AND as sensitive values to mask in error messages.
        S3PropertiesLocation s3PropertiesLocation = jdbcExecutionRequest.getS3PropertiesLocation();
        Map<String, Object> variables = getVariablesFromS3(s3PropertiesLocation);

        // Create data source from the (possibly templated) connection information.
        DataSource dataSource = createDataSource(jdbcExecutionRequest.getConnection(), variables);

        // Execute the requested statements in order.
        List<JdbcStatement> requestJdbcStatements = jdbcExecutionRequest.getStatements();
        List<JdbcStatement> responseJdbcStatements = executeStatements(requestJdbcStatements, dataSource, variables);

        // Create and return the execution result.
        return new JdbcExecutionResponse(null, responseJdbcStatements);
    }

    /**
     * Returns a map of key-value from the specified S3 properties location. Returns null if the specified location is null.
     *
     * @param s3PropertiesLocation the location of a Java properties file in S3
     *
     * @return {@link Map} of key-values, or null when no location was given
     */
    private Map<String, Object> getVariablesFromS3(S3PropertiesLocation s3PropertiesLocation)
    {
        Map<String, Object> variables = null;
        if (s3PropertiesLocation != null)
        {
            Properties properties = getProperties(s3PropertiesLocation);
            variables = new HashMap<>();
            for (Map.Entry<Object, Object> e : properties.entrySet())
            {
                variables.put(e.getKey().toString(), e.getValue());
            }
        }
        return variables;
    }

    /**
     * Gets an S3 object from the specified location, and parses it as a Java properties.
     *
     * @param s3PropertiesLocation {@link S3PropertiesLocation}
     *
     * @return {@link Properties}
     */
    private Properties getProperties(S3PropertiesLocation s3PropertiesLocation)
    {
        String s3BucketName = s3PropertiesLocation.getBucketName().trim();
        String s3ObjectKey = s3PropertiesLocation.getKey().trim();
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageDaoHelper.getS3FileTransferRequestParamsDto();
        return s3Dao.getProperties(s3BucketName, s3ObjectKey, s3FileTransferRequestParamsDto);
    }

    /**
     * Validates parameters specified in the request.
     *
     * @param jdbcExecutionRequest the request to validate
     *
     * @throws IllegalArgumentException when there are validation errors in any of the parameters
     */
    private void validateJdbcExecutionRequest(JdbcExecutionRequest jdbcExecutionRequest)
    {
        Assert.notNull(jdbcExecutionRequest, "JDBC execution request is required");
        validateJdbcConnection(jdbcExecutionRequest.getConnection());
        validateJdbcStatements(jdbcExecutionRequest.getStatements());
        validateS3PropertiesLocation(jdbcExecutionRequest.getS3PropertiesLocation());
    }

    /**
     * Validates the specified S3 properties location. Asserts that if the given location is not null, bucket name and key are not blank.
     *
     * @param s3PropertiesLocation the {@link S3PropertiesLocation} to validate
     */
    private void validateS3PropertiesLocation(S3PropertiesLocation s3PropertiesLocation)
    {
        if (s3PropertiesLocation != null)
        {
            Assert.isTrue(StringUtils.isNotBlank(s3PropertiesLocation.getBucketName()), "S3 properties location bucket name is required");
            Assert.isTrue(StringUtils.isNotBlank(s3PropertiesLocation.getKey()), "S3 properties location key is required");
        }
    }

    /**
     * Validates parameters specified in the given statements. The statements must not be null, and must not be empty.
     *
     * @param jdbcStatements the list of statements to validate
     */
    private void validateJdbcStatements(List<JdbcStatement> jdbcStatements)
    {
        Assert.notNull(jdbcStatements, "JDBC statements are required");
        Assert.isTrue(!jdbcStatements.isEmpty(), "JDBC statements are required");

        // Optionally cap the number of statements per request via configuration.
        Integer jdbcMaxStatements = configurationHelper.getProperty(ConfigurationValue.JDBC_MAX_STATEMENTS, Integer.class);
        if (jdbcMaxStatements != null)
        {
            Assert.isTrue(jdbcStatements.size() <= jdbcMaxStatements, "The number of JDBC statements exceeded the maximum allowed " + jdbcMaxStatements + ".");
        }

        for (int i = 0; i < jdbcStatements.size(); i++)
        {
            JdbcStatement jdbcStatement = jdbcStatements.get(i);
            validateJdbcStatement(jdbcStatement, i);
        }
    }

    /**
     * Validates parameters specified in the given statement.
     *
     * @param jdbcStatement statement to validate
     * @param jdbcStatementIndex the index number of the statement in the list
     */
    private void validateJdbcStatement(JdbcStatement jdbcStatement, int jdbcStatementIndex)
    {
        Assert.notNull(jdbcStatement, "JDBC statement [" + jdbcStatementIndex + "] is required");
        Assert.notNull(jdbcStatement.getType(), "JDBC statement [" + jdbcStatementIndex + "] type is required");
        validateSqlStatement(jdbcStatement.getSql(), jdbcStatementIndex);
    }

    /**
     * Validates parameters specified in the given connection. This method does not validate whether the connection can be established.
     *
     * @param jdbcConnection the JDBC connection to validate
     */
    private void validateJdbcConnection(JdbcConnection jdbcConnection)
    {
        Assert.notNull(jdbcConnection, "JDBC connection is required");
        validateUrl(jdbcConnection.getUrl());
        Assert.notNull(jdbcConnection.getUsername(), "JDBC connection user name is required");
        Assert.notNull(jdbcConnection.getPassword(), "JDBC connection password is required");
        Assert.notNull(jdbcConnection.getDatabaseType(), "JDBC connection database type is required");
    }

    /**
     * Executes the requested statements in order. Returns the result of the execution.
     *
     * @param requestJdbcStatements the list of statements to execute, in order
     * @param dataSource the data source
     * @param variables the mapping of variables
     *
     * @return List of response {@link JdbcStatement}
     */
    private List<JdbcStatement> executeStatements(List<JdbcStatement> requestJdbcStatements, DataSource dataSource, Map<String, Object> variables)
    {
        List<JdbcStatement> responseJdbcStatements = new ArrayList<>();

        /*
         * Create a copy of all the request statements.
         * The copied statements are the response statements. The response statements are defaulted to SKIPPED.
         */
        for (JdbcStatement requestJdbcStatement : requestJdbcStatements)
        {
            JdbcStatement responseJdbcStatement = createDefaultResponseJdbcStatement(requestJdbcStatement);
            responseJdbcStatements.add(responseJdbcStatement);
        }

        // We will reuse this template for all executions.
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);

        /*
         * Execute each statement.
         * If there were any errors, and continueOnError is not TRUE, then the execution will stop.
         * The un-executed response statements will remain in their SKIPPED status.
         */
        for (int i = 0; i < responseJdbcStatements.size(); i++)
        {
            JdbcStatement jdbcStatement = responseJdbcStatements.get(i);
            executeStatement(jdbcTemplate, jdbcStatement, variables, i);
            if (JdbcStatementStatus.ERROR.equals(jdbcStatement.getStatus()) && !Boolean.TRUE.equals(jdbcStatement.isContinueOnError()))
            {
                break;
            }
        }

        return responseJdbcStatements;
    }

    /**
     * Executes a single statement using the given JDBC template. The given statement will be updated with the result and status.
     *
     * @param jdbcTemplate the JDBC template
     * @param jdbcStatement the JDBC statement to execute
     * @param variables the mapping of variables
     * @param jdbcStatementIndex the index of the statement
     */
    private void executeStatement(JdbcTemplate jdbcTemplate, JdbcStatement jdbcStatement, Map<String, Object> variables, int jdbcStatementIndex)
    {
        // This is the exception to be set as the error message in the response.
        Throwable exception = null;
        try
        {
            // The SQL may be a Velocity template; evaluate it, then re-validate the result since the
            // evaluation could have produced a blank statement.
            String sql = evaluate(jdbcStatement.getSql(), variables, "jdbc statement sql");
            validateSqlStatement(sql, jdbcStatementIndex);

            // Process UPDATE type statements.
            if (JdbcStatementType.UPDATE.equals(jdbcStatement.getType()))
            {
                int result = jdbcDao.update(jdbcTemplate, sql);
                jdbcStatement.setStatus(JdbcStatementStatus.SUCCESS);
                jdbcStatement.setResult(String.valueOf(result));
            }
            // Process QUERY type statements.
            else if (JdbcStatementType.QUERY.equals(jdbcStatement.getType()))
            {
                Integer maxResults = configurationHelper.getProperty(ConfigurationValue.JDBC_RESULT_MAX_ROWS, Integer.class);
                JdbcStatementResultSet jdbcStatementResultSet = jdbcDao.query(jdbcTemplate, sql, maxResults);
                jdbcStatement.setStatus(JdbcStatementStatus.SUCCESS);
                jdbcStatement.setResultSet(jdbcStatementResultSet);
            }
            // Any other statement types are unrecognized. This case should not be possible unless developer error.
            else
            {
                throw new IllegalStateException("Unsupported JDBC statement type '" + jdbcStatement.getType() + "'");
            }
        }
        catch (CannotGetJdbcConnectionException cannotGetJdbcConnectionException)
        {
            /*
             * When the statement fails to execute due to connection errors. This usually indicates that the connection information which was specified is
             * wrong, or there is a network issue. Either way, it would indicate user error.
             * We get the wrapped exception and throw again as an IllegalArgumentException. Fall back to the wrapping exception itself when no cause is
             * present so that the message is never the literal string "null".
             */
            Throwable cause = cannotGetJdbcConnectionException.getCause();
            Throwable mostInformative = cause != null ? cause : cannotGetJdbcConnectionException;
            throw new IllegalArgumentException(String.valueOf(mostInformative).trim(), cannotGetJdbcConnectionException);
        }
        catch (DataAccessException dataAccessException)
        {
            // DataAccessException's cause is normally the SQLException thrown by the driver, whose message we report.
            // Fall back to the DataAccessException itself when there is no cause, so the error message is never "null".
            Throwable cause = dataAccessException.getCause();
            exception = cause != null ? cause : dataAccessException;
        }

        // If there was an error.
        if (exception != null)
        {
            // Set status to error and result as message.
            jdbcStatement.setStatus(JdbcStatementStatus.ERROR);
            jdbcStatement.setErrorMessage(maskSensitiveInformation(exception, variables));
        }
    }

    /**
     * Returns the message of the given exception, masking any sensitive information indicated by the given collection of sensitive data. If the variables is
     * null, no masking will occur.
     *
     * @param exception the exception message to mask
     * @param variables the mapping of variables with sensitive information
     *
     * @return The exception message with masked data.
     */
    private String maskSensitiveInformation(Throwable exception, Map<String, Object> variables)
    {
        String message = String.valueOf(exception).trim();
        if (variables != null)
        {
            // Every variable value is treated as potentially sensitive and replaced with a fixed mask.
            for (Object sensitiveData : variables.values())
            {
                String sensitiveDataString = String.valueOf(sensitiveData);
                message = message.replace(sensitiveDataString, "****");
            }
        }
        return message;
    }

    /**
     * Validates the given SQL statement where its position in the list of statement is the given index. This method does not validate SQL syntax.
     *
     * @param sql the SQL statement to validate
     * @param jdbcStatementIndex the index of the statement to validate in the list of statements
     */
    private void validateSqlStatement(String sql, int jdbcStatementIndex)
    {
        Assert.isTrue(StringUtils.isNotBlank(sql), "JDBC statement [" + jdbcStatementIndex + "] SQL is required");
    }

    /**
     * Creates and returns a {@link JdbcStatement} at a state which has not yet been executed based on the given request.
     * <p/>
     * The status will be set to {@link JdbcStatementStatus#SKIPPED} and result null.
     *
     * @param requestJdbcStatement the requested JDBC statement
     *
     * @return a new {@link JdbcStatement}
     */
    private JdbcStatement createDefaultResponseJdbcStatement(JdbcStatement requestJdbcStatement)
    {
        JdbcStatement responseJdbcStatement = new JdbcStatement();
        responseJdbcStatement.setType(requestJdbcStatement.getType());
        responseJdbcStatement.setSql(requestJdbcStatement.getSql());
        responseJdbcStatement.setContinueOnError(requestJdbcStatement.isContinueOnError());
        responseJdbcStatement.setStatus(JdbcStatementStatus.SKIPPED);
        return responseJdbcStatement;
    }

    /**
     * Creates and returns a new data source from the given connection information. Creates a new {@link DriverManagerDataSource}.
     *
     * @param jdbcConnection the JDBC connection
     * @param variables the optional map of key-value for expression evaluation
     *
     * @return a new {@link DataSource}
     */
    private DataSource createDataSource(JdbcConnection jdbcConnection, Map<String, Object> variables)
    {
        // Each connection field may be a Velocity template; re-validate the URL after evaluation.
        String url = evaluate(jdbcConnection.getUrl(), variables, "jdbc connection url");
        String username = evaluate(jdbcConnection.getUsername(), variables, "jdbc connection username");
        String password = evaluate(jdbcConnection.getPassword(), variables, "jdbc connection password");

        validateUrl(url);

        DriverManagerDataSource driverManagerDataSource = new DriverManagerDataSource();
        driverManagerDataSource.setUrl(url);
        driverManagerDataSource.setUsername(username);
        driverManagerDataSource.setPassword(password);
        driverManagerDataSource.setDriverClassName(getDriverClassName(jdbcConnection.getDatabaseType()));
        return driverManagerDataSource;
    }

    /**
     * Validates the given URL. Does not validate URL syntax or whether the URL is accessible.
     *
     * @param url the URL string to validate
     */
    private void validateUrl(String url)
    {
        Assert.isTrue(StringUtils.isNotBlank(url), "JDBC connection URL is required");
    }

    /**
     * Evaluates the given expression as a Velocity template using the given variables. Returns the expression as-is if the variables is null. The given
     * variable name will be used as the log tag.
     *
     * @param expression the expression
     * @param variables the mapping of variables
     * @param variableName the variable name
     *
     * @return the expression evaluated as a Velocity template
     */
    private String evaluate(String expression, Map<String, Object> variables, String variableName)
    {
        String result = expression;
        if (variables != null)
        {
            result = velocityHelper.evaluate(expression, variables, variableName);
        }
        return result;
    }

    /**
     * Returns the fully qualified driver class name of the given JDBC database type.
     *
     * @param jdbcDatabaseType the JDBC database type
     *
     * @return fully qualified driver class name
     * @throws IllegalArgumentException when the database type is not supported.
     */
    private String getDriverClassName(JdbcDatabaseType jdbcDatabaseType)
    {
        switch (jdbcDatabaseType)
        {
            case ORACLE:
                return DRIVER_ORACLE;
            case POSTGRES:
                return DRIVER_POSTGRES;
            case REDSHIFT:
                return DRIVER_REDSHIFT;
            default:
                throw new IllegalArgumentException("Unsupported database type '" + jdbcDatabaseType + "'");
        }
    }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.apple; import static com.facebook.buck.swift.SwiftUtil.Constants.SWIFT_EXTENSION; import com.facebook.buck.cxx.BuildRuleWithBinary; import com.facebook.buck.cxx.CxxBinaryDescription; import com.facebook.buck.cxx.CxxCompilationDatabase; import com.facebook.buck.cxx.CxxConstructorArg; import com.facebook.buck.cxx.CxxDescriptionEnhancer; import com.facebook.buck.cxx.CxxLibraryDescription; import com.facebook.buck.cxx.CxxPlatform; import com.facebook.buck.cxx.CxxStrip; import com.facebook.buck.cxx.FrameworkDependencies; import com.facebook.buck.cxx.LinkerMapMode; import com.facebook.buck.cxx.ProvidesLinkedBinaryDeps; import com.facebook.buck.cxx.StripStyle; import com.facebook.buck.io.MorePaths; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.Either; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.ImmutableFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildRules; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.PathSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import 
com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.SourcePaths; import com.facebook.buck.rules.SourceWithFlags; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.TargetNode; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.shell.AbstractGenruleDescription; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.MoreCollectors; import com.facebook.buck.util.OptionalCompat; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Suppliers; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashSet; import java.util.Optional; import java.util.Set; /** * Common logic for a {@link com.facebook.buck.rules.Description} that creates Apple target rules. 
*/ public class AppleDescriptions { public static final Flavor FRAMEWORK_FLAVOR = ImmutableFlavor.of("framework"); public static final Flavor INCLUDE_FRAMEWORKS_FLAVOR = ImmutableFlavor.of("include-frameworks"); public static final Flavor NO_INCLUDE_FRAMEWORKS_FLAVOR = ImmutableFlavor.of("no-include-frameworks"); public static final FlavorDomain<Boolean> INCLUDE_FRAMEWORKS = new FlavorDomain<>( "Include frameworks", ImmutableMap.of( INCLUDE_FRAMEWORKS_FLAVOR, Boolean.TRUE, NO_INCLUDE_FRAMEWORKS_FLAVOR, Boolean.FALSE)); private static final ImmutableSet<Flavor> BUNDLE_SPECIFIC_FLAVORS = ImmutableSet.of( INCLUDE_FRAMEWORKS_FLAVOR, NO_INCLUDE_FRAMEWORKS_FLAVOR); private static final String MERGED_ASSET_CATALOG_NAME = "Merged"; /** Utility class: do not instantiate. */ private AppleDescriptions() {} public static Path getHeaderPathPrefix( AppleNativeTargetDescriptionArg arg, BuildTarget buildTarget) { return Paths.get(arg.headerPathPrefix.orElse(buildTarget.getShortName())); } public static ImmutableSortedMap<String, SourcePath> convertAppleHeadersToPublicCxxHeaders( Function<SourcePath, Path> pathResolver, Path headerPathPrefix, CxxLibraryDescription.Arg arg) { // The exported headers in the populated cxx constructor arg will contain exported headers from // the apple constructor arg with the public include style. return AppleDescriptions.parseAppleHeadersForUseFromOtherTargets( pathResolver, headerPathPrefix, arg.exportedHeaders); } public static ImmutableSortedMap<String, SourcePath> convertAppleHeadersToPrivateCxxHeaders( Function<SourcePath, Path> pathResolver, Path headerPathPrefix, CxxLibraryDescription.Arg arg) { // The private headers will contain exported headers with the private include style and private // headers with both styles. 
return ImmutableSortedMap.<String, SourcePath>naturalOrder() .putAll( AppleDescriptions.parseAppleHeadersForUseFromTheSameTarget( pathResolver, arg.headers)) .putAll( AppleDescriptions.parseAppleHeadersForUseFromOtherTargets( pathResolver, headerPathPrefix, arg.headers)) .putAll( AppleDescriptions.parseAppleHeadersForUseFromTheSameTarget( pathResolver, arg.exportedHeaders)) .build(); } @VisibleForTesting static ImmutableSortedMap<String, SourcePath> parseAppleHeadersForUseFromOtherTargets( Function<SourcePath, Path> pathResolver, Path headerPathPrefix, SourceList headers) { if (headers.getUnnamedSources().isPresent()) { // The user specified a set of header files. For use from other targets, prepend their names // with the header path prefix. return convertToFlatCxxHeaders( headerPathPrefix, pathResolver, headers.getUnnamedSources().get()); } else { // The user specified a map from include paths to header files. Just use the specified map. return headers.getNamedSources().get(); } } @VisibleForTesting static ImmutableMap<String, SourcePath> parseAppleHeadersForUseFromTheSameTarget( Function<SourcePath, Path> pathResolver, SourceList headers) { if (headers.getUnnamedSources().isPresent()) { // The user specified a set of header files. Headers can be included from the same target // using only their file name without a prefix. return convertToFlatCxxHeaders( Paths.get(""), pathResolver, headers.getUnnamedSources().get()); } else { // The user specified a map from include paths to header files. There is nothing we need to // add on top of the exported headers. return ImmutableMap.of(); } } /** * Convert {@link SourcePath} to a mapping of {@code include path -> file path}. * <p/> * {@code include path} is the path that can be referenced in {@code #include} directives. * {@code file path} is the actual path to the file on disk. * * @throws HumanReadableException when two {@code SourcePath} yields the same IncludePath. 
*/ @VisibleForTesting static ImmutableSortedMap<String, SourcePath> convertToFlatCxxHeaders( Path headerPathPrefix, Function<SourcePath, Path> sourcePathResolver, Set<SourcePath> headerPaths) { Set<String> includeToFile = new HashSet<String>(headerPaths.size()); ImmutableSortedMap.Builder<String, SourcePath> builder = ImmutableSortedMap.naturalOrder(); for (SourcePath headerPath : headerPaths) { Path fileName = sourcePathResolver.apply(headerPath).getFileName(); String key = headerPathPrefix.resolve(fileName).toString(); if (includeToFile.contains(key)) { ImmutableSortedMap<String, SourcePath> result = builder.build(); throw new HumanReadableException( "The same include path maps to multiple files:\n" + " Include path: %s\n" + " Conflicting files:\n" + " %s\n" + " %s", key, headerPath, result.get(key)); } includeToFile.add(key); builder.put(key, headerPath); } return builder.build(); } public static void populateCxxConstructorArg( SourcePathResolver resolver, CxxConstructorArg output, AppleNativeTargetDescriptionArg arg, BuildTarget buildTarget) { Path headerPathPrefix = AppleDescriptions.getHeaderPathPrefix(arg, buildTarget); // The resulting cxx constructor arg will have no exported headers and both headers and exported // headers specified in the apple arg will be available with both public and private include // styles. 
ImmutableSortedMap<String, SourcePath> headerMap = ImmutableSortedMap.<String, SourcePath>naturalOrder() .putAll( convertAppleHeadersToPublicCxxHeaders( resolver::getRelativePath, headerPathPrefix, arg)) .putAll( convertAppleHeadersToPrivateCxxHeaders( resolver::getRelativePath, headerPathPrefix, arg)) .build(); ImmutableSortedSet.Builder<SourceWithFlags> nonSwiftSrcs = ImmutableSortedSet.naturalOrder(); for (SourceWithFlags src: arg.srcs) { if (!MorePaths.getFileExtension(resolver.getAbsolutePath(src.getSourcePath())) .equalsIgnoreCase(SWIFT_EXTENSION)) { nonSwiftSrcs.add(src); } } output.srcs = nonSwiftSrcs.build(); output.platformSrcs = arg.platformSrcs; output.headers = SourceList.ofNamedSources(headerMap); output.platformHeaders = arg.platformHeaders; output.prefixHeader = arg.prefixHeader; output.compilerFlags = arg.compilerFlags; output.platformCompilerFlags = arg.platformCompilerFlags; output.langCompilerFlags = arg.langCompilerFlags; output.preprocessorFlags = arg.preprocessorFlags; output.platformPreprocessorFlags = arg.platformPreprocessorFlags; output.langPreprocessorFlags = arg.langPreprocessorFlags; output.linkerFlags = arg.linkerFlags; output.platformLinkerFlags = arg.platformLinkerFlags; output.frameworks = arg.frameworks; output.libraries = arg.libraries; output.deps = arg.deps; // This is intentionally an empty string; we put all prefixes into // the header map itself. 
output.headerNamespace = Optional.of(""); output.cxxRuntimeType = arg.cxxRuntimeType; output.tests = arg.tests; output.precompiledHeader = arg.precompiledHeader; } public static void populateCxxBinaryDescriptionArg( SourcePathResolver resolver, CxxBinaryDescription.Arg output, AppleNativeTargetDescriptionArg arg, BuildTarget buildTarget) { populateCxxConstructorArg( resolver, output, arg, buildTarget); output.linkStyle = arg.linkStyle; } public static void populateCxxLibraryDescriptionArg( SourcePathResolver resolver, CxxLibraryDescription.Arg output, AppleNativeTargetDescriptionArg arg, BuildTarget buildTarget) { populateCxxConstructorArg( resolver, output, arg, buildTarget); Path headerPathPrefix = AppleDescriptions.getHeaderPathPrefix(arg, buildTarget); output.headers = SourceList.ofNamedSources( convertAppleHeadersToPrivateCxxHeaders( resolver::getRelativePath, headerPathPrefix, arg)); output.exportedDeps = arg.exportedDeps; output.exportedPreprocessorFlags = arg.exportedPreprocessorFlags; output.exportedHeaders = SourceList.ofNamedSources( convertAppleHeadersToPublicCxxHeaders( resolver::getRelativePath, headerPathPrefix, arg)); output.exportedPlatformHeaders = arg.exportedPlatformHeaders; output.exportedPlatformPreprocessorFlags = arg.exportedPlatformPreprocessorFlags; output.exportedLangPreprocessorFlags = arg.exportedLangPreprocessorFlags; output.exportedLinkerFlags = arg.exportedLinkerFlags; output.exportedPlatformLinkerFlags = arg.exportedPlatformLinkerFlags; output.soname = arg.soname; output.forceStatic = arg.forceStatic; output.preferredLinkage = arg.preferredLinkage; output.linkWhole = arg.linkWhole; output.supportedPlatformsRegex = arg.supportedPlatformsRegex; output.canBeAsset = arg.canBeAsset; output.exportedDeps = arg.exportedDeps; output.xcodePublicHeadersSymlinks = arg.xcodePublicHeadersSymlinks; output.xcodePrivateHeadersSymlinks = arg.xcodePrivateHeadersSymlinks; } public static Optional<AppleAssetCatalog> 
createBuildRuleForTransitiveAssetCatalogDependencies( TargetGraph targetGraph, BuildRuleParams params, SourcePathResolver sourcePathResolver, ApplePlatform applePlatform, Tool actool) { TargetNode<?, ?> targetNode = targetGraph.get(params.getBuildTarget()); ImmutableSet<AppleAssetCatalogDescription.Arg> assetCatalogArgs = AppleBuildRules.collectRecursiveAssetCatalogs( targetGraph, Optional.empty(), ImmutableList.of(targetNode)); ImmutableSortedSet.Builder<SourcePath> assetCatalogDirsBuilder = ImmutableSortedSet.naturalOrder(); Optional<String> appIcon = Optional.empty(); Optional<String> launchImage = Optional.empty(); AppleAssetCatalogDescription.Optimization optimization = null; for (AppleAssetCatalogDescription.Arg arg : assetCatalogArgs) { if (optimization == null) { optimization = arg.optimization; } assetCatalogDirsBuilder.addAll(arg.dirs); if (arg.appIcon.isPresent()) { if (appIcon.isPresent()) { throw new HumanReadableException("At most one asset catalog in the dependencies of %s " + "can have a app_icon", params.getBuildTarget()); } appIcon = arg.appIcon; } if (arg.launchImage.isPresent()) { if (launchImage.isPresent()) { throw new HumanReadableException("At most one asset catalog in the dependencies of %s " + "can have a launch_image", params.getBuildTarget()); } launchImage = arg.launchImage; } if (arg.optimization != optimization) { throw new HumanReadableException("At most one asset catalog optimisation style can be " + "specified in the dependencies %s", params.getBuildTarget()); } } ImmutableSortedSet<SourcePath> assetCatalogDirs = assetCatalogDirsBuilder.build(); if (assetCatalogDirs.isEmpty()) { return Optional.empty(); } BuildRuleParams assetCatalogParams = params.copyWithChanges( params.getBuildTarget().withAppendedFlavors(AppleAssetCatalog.FLAVOR), Suppliers.ofInstance(ImmutableSortedSet.of()), Suppliers.ofInstance(ImmutableSortedSet.of())); return Optional.of( new AppleAssetCatalog( assetCatalogParams, sourcePathResolver, 
// NOTE(review): the lines below are the tail of
// createBuildRuleForTransitiveAssetCatalogDependencies(), whose signature and
// opening lines are above this chunk; they are the trailing constructor
// arguments of the asset catalog rule being built.
applePlatform.getName(),
            actool,
            assetCatalogDirs,
            appIcon,
            launchImage,
            optimization,
            MERGED_ASSET_CATALOG_NAME));
  }

  /**
   * Collects the transitive core data model dependencies of the given target and, when any exist,
   * wraps them all into a single {@link CoreDataModel} rule flavored with
   * {@code CoreDataModel.FLAVOR}.
   *
   * @return the aggregated core data model rule, or {@link Optional#empty()} when the target has
   *     no transitive core data model dependencies
   */
  public static Optional<CoreDataModel> createBuildRulesForCoreDataDependencies(
      TargetGraph targetGraph,
      BuildRuleParams params,
      SourcePathResolver sourcePathResolver,
      String moduleName,
      AppleCxxPlatform appleCxxPlatform) {
    TargetNode<?, ?> targetNode = targetGraph.get(params.getBuildTarget());
    ImmutableSet<AppleWrapperResourceArg> coreDataModelArgs =
        AppleBuildRules.collectTransitiveBuildRules(
            targetGraph,
            Optional.empty(),
            AppleBuildRules.CORE_DATA_MODEL_DESCRIPTION_CLASSES,
            ImmutableList.of(targetNode));
    // The aggregated rule carries no declared or extra deps of its own.
    BuildRuleParams coreDataModelParams =
        params.copyWithChanges(
            params.getBuildTarget().withAppendedFlavors(CoreDataModel.FLAVOR),
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(ImmutableSortedSet.of()));
    if (coreDataModelArgs.isEmpty()) {
      return Optional.empty();
    } else {
      return Optional.of(new CoreDataModel(
          coreDataModelParams,
          sourcePathResolver,
          appleCxxPlatform,
          moduleName,
          coreDataModelArgs.stream()
              .map(input -> new PathSourcePath(params.getProjectFilesystem(), input.path))
              .collect(MoreCollectors.toImmutableSet())));
    }
  }

  /**
   * Collects the transitive SceneKit asset dependencies of the given target and, when any exist,
   * wraps them into a single {@link SceneKitAssets} rule flavored with
   * {@code SceneKitAssets.FLAVOR}. Mirrors
   * {@link #createBuildRulesForCoreDataDependencies}.
   *
   * @return the aggregated SceneKit assets rule, or {@link Optional#empty()} when there are none
   */
  public static Optional<SceneKitAssets> createBuildRulesForSceneKitAssetsDependencies(
      TargetGraph targetGraph,
      BuildRuleParams params,
      SourcePathResolver sourcePathResolver,
      AppleCxxPlatform appleCxxPlatform) {
    TargetNode<?, ?> targetNode = targetGraph.get(params.getBuildTarget());
    ImmutableSet<AppleWrapperResourceArg> sceneKitAssetsArgs =
        AppleBuildRules.collectTransitiveBuildRules(
            targetGraph,
            Optional.empty(),
            AppleBuildRules.SCENEKIT_ASSETS_DESCRIPTION_CLASSES,
            ImmutableList.of(targetNode));
    // The aggregated rule carries no declared or extra deps of its own.
    BuildRuleParams sceneKitAssetsParams =
        params.copyWithChanges(
            params.getBuildTarget().withAppendedFlavors(SceneKitAssets.FLAVOR),
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(ImmutableSortedSet.of()));
    if (sceneKitAssetsArgs.isEmpty()) {
      return Optional.empty();
    } else {
      return Optional.of(new SceneKitAssets(
          sceneKitAssetsParams,
          sourcePathResolver,
          appleCxxPlatform,
          sceneKitAssetsArgs.stream()
              .map(input -> new PathSourcePath(params.getProjectFilesystem(), input.path))
              .collect(MoreCollectors.toImmutableSet())));
    }
  }

  /**
   * Builds an {@link AppleDebuggableBinary} that pairs a stripped binary with its debug
   * information. For {@code DWARF_AND_DSYM} a dSYM rule is created (or reused) as a runtime dep;
   * for plain {@code DWARF} the unstripped binary itself is exposed instead of the stripped one.
   */
  static AppleDebuggableBinary createAppleDebuggableBinary(
      BuildRuleParams params,
      BuildRuleResolver resolver,
      BuildRule strippedBinaryRule,
      ProvidesLinkedBinaryDeps unstrippedBinaryRule,
      AppleDebugFormat debugFormat,
      FlavorDomain<CxxPlatform> cxxPlatformFlavorDomain,
      CxxPlatform defaultCxxPlatform,
      FlavorDomain<AppleCxxPlatform> appleCxxPlatforms) {
    // Only materializes a dSYM when debugFormat == DWARF_AND_DSYM (see helper).
    Optional<AppleDsym> appleDsym =
        createAppleDsymForDebugFormat(
            debugFormat,
            params,
            resolver,
            unstrippedBinaryRule,
            cxxPlatformFlavorDomain,
            defaultCxxPlatform,
            appleCxxPlatforms);
    BuildRule buildRuleForDebugFormat;
    if (debugFormat == AppleDebugFormat.DWARF) {
      // DWARF-in-binary: debug info lives in the unstripped binary.
      buildRuleForDebugFormat = unstrippedBinaryRule;
    } else {
      buildRuleForDebugFormat = strippedBinaryRule;
    }
    AppleDebuggableBinary rule =
        new AppleDebuggableBinary(
            params.copyWithChanges(
                strippedBinaryRule.getBuildTarget()
                    .withAppendedFlavors(AppleDebuggableBinary.RULE_FLAVOR, debugFormat.getFlavor()),
                Suppliers.ofInstance(
                    AppleDebuggableBinary.getRequiredRuntimeDeps(
                        debugFormat,
                        strippedBinaryRule,
                        unstrippedBinaryRule,
                        appleDsym)),
                Suppliers.ofInstance(ImmutableSortedSet.of())),
            new SourcePathResolver(new SourcePathRuleFinder(resolver)),
            buildRuleForDebugFormat);
    return rule;
  }

  /**
   * Returns the {@link AppleDsym} rule for the given binary when the debug format is
   * {@code DWARF_AND_DSYM}, creating and registering it only if the resolver does not already
   * hold one under the derived dSYM target; otherwise returns {@link Optional#empty()}.
   */
  private static Optional<AppleDsym> createAppleDsymForDebugFormat(
      AppleDebugFormat debugFormat,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      ProvidesLinkedBinaryDeps unstrippedBinaryRule,
      FlavorDomain<CxxPlatform> cxxPlatformFlavorDomain,
      CxxPlatform defaultCxxPlatform,
      FlavorDomain<AppleCxxPlatform> appleCxxPlatforms) {
    if (debugFormat == AppleDebugFormat.DWARF_AND_DSYM) {
      // Strip flavors that do not apply to the dSYM rule, then tag it with its own flavor.
      BuildTarget dsymBuildTarget =
          params.getBuildTarget()
              .withoutFlavors(CxxStrip.RULE_FLAVOR)
              .withoutFlavors(StripStyle.FLAVOR_DOMAIN.getFlavors())
              .withoutFlavors(AppleDebugFormat.FLAVOR_DOMAIN.getFlavors())
              .withoutFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor())
              .withAppendedFlavors(AppleDsym.RULE_FLAVOR);
      Optional<BuildRule> dsymRule = resolver.getRuleOptional(dsymBuildTarget);
      if (!dsymRule.isPresent()) {
        dsymRule =
            Optional.of(
                createAppleDsym(
                    params.copyWithBuildTarget(dsymBuildTarget),
                    resolver,
                    unstrippedBinaryRule,
                    cxxPlatformFlavorDomain,
                    defaultCxxPlatform,
                    appleCxxPlatforms));
      }
      Preconditions.checkArgument(dsymRule.get() instanceof AppleDsym);
      return Optional.of((AppleDsym) dsymRule.get());
    }
    return Optional.empty();
  }

  /**
   * Creates an {@link AppleDsym} rule that runs dsymutil over the unstripped binary, and adds it
   * to the resolver's index. The rule depends on the binary plus its compile and static library
   * deps so dsymutil can find all object files.
   */
  static AppleDsym createAppleDsym(
      BuildRuleParams params,
      BuildRuleResolver resolver,
      ProvidesLinkedBinaryDeps unstrippedBinaryBuildRule,
      FlavorDomain<CxxPlatform> cxxPlatformFlavorDomain,
      CxxPlatform defaultCxxPlatform,
      FlavorDomain<AppleCxxPlatform> appleCxxPlatforms) {
    AppleCxxPlatform appleCxxPlatform =
        ApplePlatforms.getAppleCxxPlatformForBuildTarget(
            cxxPlatformFlavorDomain,
            defaultCxxPlatform,
            appleCxxPlatforms,
            unstrippedBinaryBuildRule.getBuildTarget(),
            MultiarchFileInfos.create(appleCxxPlatforms, unstrippedBinaryBuildRule.getBuildTarget()));
    AppleDsym appleDsym =
        new AppleDsym(
            params.copyWithDeps(
                Suppliers.ofInstance(
                    ImmutableSortedSet.<BuildRule>naturalOrder()
                        .add(unstrippedBinaryBuildRule)
                        .addAll(unstrippedBinaryBuildRule.getCompileDeps())
                        .addAll(unstrippedBinaryBuildRule.getStaticLibraryDeps())
                        .build()),
                Suppliers.ofInstance(ImmutableSortedSet.of())),
            new SourcePathResolver(new SourcePathRuleFinder(resolver)),
            appleCxxPlatform.getDsymutil(),
            appleCxxPlatform.getLldb(),
            new BuildTargetSourcePath(unstrippedBinaryBuildRule.getBuildTarget()),
            AppleDsym.getDsymOutputPath(params.getBuildTarget(), params.getProjectFilesystem()));
    resolver.addToIndex(appleDsym);
    return appleDsym;
  }

  /**
   * Assembles the full {@link AppleBundle} rule for a bundle target: resolves the flavored binary,
   * attaches asset catalogs / core data models / SceneKit assets, collects frameworks and
   * first-level extension bundles, and wires up debug info (stripping + dSYM) per
   * {@code debugFormat}.
   *
   * @throws NoSuchBuildTargetException if a required dep rule cannot be resolved
   */
  static AppleBundle createAppleBundle(
      FlavorDomain<CxxPlatform> cxxPlatformFlavorDomain,
      CxxPlatform defaultCxxPlatform,
      FlavorDomain<AppleCxxPlatform> appleCxxPlatforms,
      TargetGraph targetGraph,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      CodeSignIdentityStore codeSignIdentityStore,
      ProvisioningProfileStore provisioningProfileStore,
      BuildTarget binary,
      Either<AppleBundleExtension, String> extension,
      Optional<String> productName,
      final SourcePath infoPlist,
      ImmutableMap<String, String> infoPlistSubstitutions,
      ImmutableSortedSet<BuildTarget> deps,
      ImmutableSortedSet<BuildTarget> tests,
      AppleDebugFormat debugFormat,
      boolean dryRunCodeSigning,
      boolean cacheable) throws NoSuchBuildTargetException {
    AppleCxxPlatform appleCxxPlatform =
        ApplePlatforms.getAppleCxxPlatformForBuildTarget(
            cxxPlatformFlavorDomain,
            defaultCxxPlatform,
            appleCxxPlatforms,
            params.getBuildTarget(),
            MultiarchFileInfos.create(appleCxxPlatforms, params.getBuildTarget()));
    // Framework bundles use a different internal directory layout than app bundles.
    AppleBundleDestinations destinations;
    if (extension.isLeft() && extension.getLeft().equals(AppleBundleExtension.FRAMEWORK)) {
      destinations =
          AppleBundleDestinations.platformFrameworkDestinations(
              appleCxxPlatform.getAppleSdk().getApplePlatform());
    } else {
      destinations =
          AppleBundleDestinations.platformDestinations(
              appleCxxPlatform.getAppleSdk().getApplePlatform());
    }
    AppleBundleResources collectedResources =
        AppleResources.collectResourceDirsAndFiles(
            targetGraph,
            Optional.empty(),
            targetGraph.get(params.getBuildTarget()));
    ImmutableSet.Builder<SourcePath> frameworksBuilder = ImmutableSet.builder();
    if (INCLUDE_FRAMEWORKS.getRequiredValue(params.getBuildTarget())) {
      for (BuildTarget dep : deps) {
        // Ask each dep (flavored for this platform) which frameworks it contributes.
        Optional<FrameworkDependencies> frameworkDependencies =
            resolver.requireMetadata(
                BuildTarget.builder(dep)
                    .addFlavors(FRAMEWORK_FLAVOR)
                    .addFlavors(NO_INCLUDE_FRAMEWORKS_FLAVOR)
                    .addFlavors(appleCxxPlatform.getCxxPlatform().getFlavor())
                    .build(),
                FrameworkDependencies.class);
        if (frameworkDependencies.isPresent()) {
          frameworksBuilder.addAll(frameworkDependencies.get().getSourcePaths());
        }
      }
    }
    ImmutableSet<SourcePath> frameworks = frameworksBuilder.build();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver sourcePathResolver = new SourcePathResolver(ruleFinder);
    // Constituent rules (binary, catalogs, ...) must not inherit bundle-only flavors.
    BuildRuleParams paramsWithoutBundleSpecificFlavors = stripBundleSpecificFlavors(params);
    Optional<AppleAssetCatalog> assetCatalog =
        createBuildRuleForTransitiveAssetCatalogDependencies(
            targetGraph,
            paramsWithoutBundleSpecificFlavors,
            sourcePathResolver,
            appleCxxPlatform.getAppleSdk().getApplePlatform(),
            appleCxxPlatform.getActool());
    Optional<CoreDataModel> coreDataModel =
        createBuildRulesForCoreDataDependencies(
            targetGraph,
            paramsWithoutBundleSpecificFlavors,
            sourcePathResolver,
            AppleBundle.getBinaryName(params.getBuildTarget(), productName),
            appleCxxPlatform);
    Optional<SceneKitAssets> sceneKitAssets =
        createBuildRulesForSceneKitAssetsDependencies(
            targetGraph,
            paramsWithoutBundleSpecificFlavors,
            sourcePathResolver,
            appleCxxPlatform);
    // TODO(bhamiltoncx): Sort through the changes needed to make project generation work with
    // binary being optional.
    BuildRule flavoredBinaryRule =
        getFlavoredBinaryRule(
            cxxPlatformFlavorDomain,
            defaultCxxPlatform,
            targetGraph,
            paramsWithoutBundleSpecificFlavors.getBuildTarget().getFlavors(),
            resolver,
            binary);
    // NOTE(review): parameter reassignment — non-debuggable binaries force NONE.
    if (!AppleDebuggableBinary.isBuildRuleDebuggable(flavoredBinaryRule)) {
      debugFormat = AppleDebugFormat.NONE;
    }
    // Derive the unstripped-binary target by removing strip/debug/app flavors.
    // (RULE_FLAVOR and APP_FLAVOR are removed twice — harmless, withoutFlavors is idempotent.)
    BuildTarget unstrippedTarget =
        flavoredBinaryRule.getBuildTarget()
            .withoutFlavors(
                CxxStrip.RULE_FLAVOR,
                AppleDebuggableBinary.RULE_FLAVOR,
                AppleBinaryDescription.APP_FLAVOR)
            .withoutFlavors(StripStyle.FLAVOR_DOMAIN.getFlavors())
            .withoutFlavors(AppleDebugFormat.FLAVOR_DOMAIN.getFlavors())
            .withoutFlavors(AppleDebuggableBinary.RULE_FLAVOR)
            .withoutFlavors(ImmutableSet.of(AppleBinaryDescription.APP_FLAVOR));
    Optional<LinkerMapMode> linkerMapMode =
        LinkerMapMode.FLAVOR_DOMAIN.getValue(params.getBuildTarget());
    if (linkerMapMode.isPresent()) {
      unstrippedTarget = unstrippedTarget.withAppendedFlavors(linkerMapMode.get().getFlavor());
    }
    BuildRule unstrippedBinaryRule = resolver.requireRule(unstrippedTarget);
    BuildRule targetDebuggableBinaryRule;
    Optional<AppleDsym> appleDsym;
    if (unstrippedBinaryRule instanceof ProvidesLinkedBinaryDeps) {
      BuildTarget binaryBuildTarget =
          getBinaryFromBuildRuleWithBinary(flavoredBinaryRule)
              .getBuildTarget()
              .withoutFlavors(AppleDebugFormat.FLAVOR_DOMAIN.getFlavors());
      BuildRuleParams binaryParams = params.copyWithBuildTarget(binaryBuildTarget);
      targetDebuggableBinaryRule =
          createAppleDebuggableBinary(
              binaryParams,
              resolver,
              getBinaryFromBuildRuleWithBinary(flavoredBinaryRule),
              (ProvidesLinkedBinaryDeps) unstrippedBinaryRule,
              debugFormat,
              cxxPlatformFlavorDomain,
              defaultCxxPlatform,
              appleCxxPlatforms);
      appleDsym =
          createAppleDsymForDebugFormat(
              debugFormat,
              binaryParams,
              resolver,
              (ProvidesLinkedBinaryDeps) unstrippedBinaryRule,
              cxxPlatformFlavorDomain,
              defaultCxxPlatform,
              appleCxxPlatforms);
    } else {
      // Binary cannot be stripped/dsym'd (e.g. a genrule output) — use it as-is.
      targetDebuggableBinaryRule = unstrippedBinaryRule;
      appleDsym = Optional.empty();
    }
    BuildRuleParams bundleParamsWithFlavoredBinaryDep =
        getBundleParamsWithUpdatedDeps(
            params,
            binary,
            ImmutableSet.<BuildRule>builder()
                .add(targetDebuggableBinaryRule)
                .addAll(OptionalCompat.asSet(assetCatalog))
                .addAll(OptionalCompat.asSet(coreDataModel))
                .addAll(OptionalCompat.asSet(sceneKitAssets))
                .addAll(
                    BuildRules.toBuildRulesFor(
                        params.getBuildTarget(),
                        resolver,
                        SourcePaths.filterBuildTargetSourcePaths(
                            Iterables.concat(
                                ImmutableList.of(
                                    collectedResources.getAll(),
                                    frameworks)))))
                .addAll(OptionalCompat.asSet(appleDsym))
                .build());
    ImmutableMap<SourcePath, String> extensionBundlePaths =
        collectFirstLevelAppleDependencyBundles(
            params.getDeps(),
            destinations);
    return new AppleBundle(
        bundleParamsWithFlavoredBinaryDep,
        sourcePathResolver,
        extension,
        productName,
        infoPlist,
        infoPlistSubstitutions,
        Optional.of(getBinaryFromBuildRuleWithBinary(flavoredBinaryRule)),
        appleDsym,
        destinations,
        collectedResources,
        extensionBundlePaths,
        frameworks,
        appleCxxPlatform,
        assetCatalog,
        coreDataModel,
        sceneKitAssets,
        tests,
        codeSignIdentityStore,
        provisioningProfileStore,
        dryRunCodeSigning,
        cacheable);
  }

  /** Unwraps a {@link BuildRuleWithBinary} to its underlying binary rule; other rules pass through. */
  private static BuildRule getBinaryFromBuildRuleWithBinary(BuildRule rule) {
    if (rule instanceof BuildRuleWithBinary) {
      rule = ((BuildRuleWithBinary) rule).getBinaryBuildRule();
    }
    return rule;
  }

  /**
   * Resolves the binary rule for a bundle, re-flavoring the binary target so it compiles for the
   * bundle's platform (adding the default C++ platform flavor when none is present, and a default
   * strip style when none is specified).
   *
   * @throws HumanReadableException if an AppleLibrary binary does not carry exactly one supported
   *     library flavor
   * @throws NoSuchBuildTargetException if the flavored rule cannot be created
   */
  private static BuildRule getFlavoredBinaryRule(
      FlavorDomain<CxxPlatform> cxxPlatformFlavorDomain,
      CxxPlatform defaultCxxPlatform,
      TargetGraph targetGraph,
      ImmutableSet<Flavor> flavors,
      BuildRuleResolver resolver,
      BuildTarget binary) throws NoSuchBuildTargetException {
    // Don't flavor genrule deps.
    if (targetGraph.get(binary).getDescription() instanceof AbstractGenruleDescription) {
      return resolver.requireRule(binary);
    }
    // Cxx targets must have one Platform Flavor set otherwise nothing gets compiled.
    if (flavors.contains(AppleDescriptions.FRAMEWORK_FLAVOR)) {
      flavors =
          ImmutableSet.<Flavor>builder()
              .addAll(flavors)
              .add(CxxDescriptionEnhancer.SHARED_FLAVOR)
              .build();
    }
    // Bundle-level flavors must not leak onto the binary target.
    flavors =
        ImmutableSet.copyOf(
            Sets.difference(
                flavors,
                ImmutableSet.of(
                    AppleDescriptions.FRAMEWORK_FLAVOR,
                    AppleBinaryDescription.APP_FLAVOR)));
    if (!cxxPlatformFlavorDomain.containsAnyOf(flavors)) {
      flavors =
          new ImmutableSet.Builder<Flavor>()
              .addAll(flavors)
              .add(defaultCxxPlatform.getFlavor())
              .build();
    }
    BuildTarget.Builder buildTargetBuilder =
        BuildTarget.builder(binary.getUnflavoredBuildTarget()).addAllFlavors(flavors);
    if (!(AppleLibraryDescription.LIBRARY_TYPE.getFlavor(flavors).isPresent())) {
      buildTargetBuilder.addAllFlavors(binary.getFlavors());
    } else {
      // A library-type flavor is already chosen; drop any competing one from the binary target.
      buildTargetBuilder.addAllFlavors(
          Sets.difference(
              binary.getFlavors(),
              AppleLibraryDescription.LIBRARY_TYPE.getFlavors()));
    }
    BuildTarget buildTarget = buildTargetBuilder.build();
    final TargetNode<?, ?> binaryTargetNode = targetGraph.get(buildTarget);
    if (binaryTargetNode.getDescription() instanceof AppleTestDescription) {
      return resolver.getRule(binary);
    }
    // If the binary target of the AppleBundle is an AppleLibrary then the build flavor
    // must be specified.
    if (binaryTargetNode.getDescription() instanceof AppleLibraryDescription &&
        (Sets.intersection(
            AppleBundleDescription.SUPPORTED_LIBRARY_FLAVORS,
            buildTarget.getFlavors()).size() != 1)) {
      throw new HumanReadableException(
          "AppleExtension bundle [%s] must have exactly one of these flavors: [%s].",
          binaryTargetNode.getBuildTarget().toString(),
          Joiner.on(", ").join(AppleBundleDescription.SUPPORTED_LIBRARY_FLAVORS));
    }
    if (!StripStyle.FLAVOR_DOMAIN.containsAnyOf(buildTarget.getFlavors())) {
      buildTarget = buildTarget.withAppendedFlavors(StripStyle.NON_GLOBAL_SYMBOLS.getFlavor());
    }
    return resolver.requireRule(buildTarget);
  }

  /**
   * Returns bundle params whose deps replace the original (unflavored) binary rule with the given
   * set of new deps (flavored binary, catalogs, dSYM, ...).
   */
  private static BuildRuleParams getBundleParamsWithUpdatedDeps(
      final BuildRuleParams params,
      final BuildTarget originalBinaryTarget,
      final Set<BuildRule> newDeps) {
    // Remove the unflavored binary rule and add the flavored one instead.
    final Predicate<BuildRule> notOriginalBinaryRule =
        Predicates.not(
            BuildRules.isBuildRuleWithTarget(originalBinaryTarget));
    return params.copyWithDeps(
        Suppliers.ofInstance(
            FluentIterable
                .from(params.getDeclaredDeps().get())
                .filter(notOriginalBinaryRule)
                .append(newDeps)
                .toSortedSet(Ordering.natural())),
        Suppliers.ofInstance(
            FluentIterable
                .from(params.getExtraDeps().get())
                .filter(notOriginalBinaryRule)
                .toSortedSet(Ordering.natural())));
  }

  /**
   * Maps each direct AppleBundle dep's output to the destination directory it should be copied
   * into inside this bundle (watch app dir, PlugIns, Frameworks, or Resources for legacy watch
   * apps). Other extensions are ignored.
   */
  private static ImmutableMap<SourcePath, String> collectFirstLevelAppleDependencyBundles(
      ImmutableSortedSet<BuildRule> deps,
      AppleBundleDestinations destinations) {
    ImmutableMap.Builder<SourcePath, String> extensionBundlePaths = ImmutableMap.builder();
    // We only care about the direct layer of dependencies. ExtensionBundles inside ExtensionBundles
    // do not get pulled in to the top-level Bundle.
    for (BuildRule rule : deps) {
      if (rule instanceof AppleBundle) {
        AppleBundle appleBundle = (AppleBundle) rule;
        Path outputPath = Preconditions.checkNotNull(
            appleBundle.getPathToOutput(),
            "Path cannot be null for AppleBundle [%s].",
            appleBundle);
        SourcePath sourcePath = new BuildTargetSourcePath(
            appleBundle.getBuildTarget(),
            outputPath);
        if (AppleBundleExtension.APPEX.toFileExtension().equals(appleBundle.getExtension()) ||
            AppleBundleExtension.APP.toFileExtension().equals(appleBundle.getExtension())) {
          Path destinationPath;
          String platformName = appleBundle.getPlatformName();
          if ((platformName.equals(ApplePlatform.WATCHOS.getName()) ||
              platformName.equals(ApplePlatform.WATCHSIMULATOR.getName())) &&
              appleBundle.getExtension().equals(AppleBundleExtension.APP.toFileExtension())) {
            destinationPath = destinations.getWatchAppPath();
          } else if (appleBundle.isLegacyWatchApp()) {
            destinationPath = destinations.getResourcesPath();
          } else {
            destinationPath = destinations.getPlugInsPath();
          }
          extensionBundlePaths.put(sourcePath, destinationPath.toString());
        } else if (
            AppleBundleExtension.FRAMEWORK.toFileExtension().equals(appleBundle.getExtension())) {
          extensionBundlePaths.put(
              sourcePath,
              destinations.getFrameworksPath().toString());
        }
      }
    }
    return extensionBundlePaths.build();
  }

  /**
   * Strip flavors that only apply to a bundle from build targets that are passed to constituent
   * rules of the bundle, such as its associated binary, asset catalog, etc.
   */
  private static BuildRuleParams stripBundleSpecificFlavors(BuildRuleParams params) {
    return params.copyWithBuildTarget(
        params.getBuildTarget().withoutFlavors(BUNDLE_SPECIFIC_FLAVORS));
  }

  /**
   * Returns true when the target's flavors identify a rule type (compilation databases, static
   * libraries, header symlink trees) to which linker-map flavoring does not apply.
   */
  public static boolean flavorsDoNotAllowLinkerMapMode(BuildRuleParams params) {
    ImmutableSet<Flavor> flavors = params.getBuildTarget().getFlavors();
    return flavors.contains(CxxCompilationDatabase.COMPILATION_DATABASE) ||
        flavors.contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE) ||
        flavors.contains(CxxDescriptionEnhancer.STATIC_FLAVOR) ||
        flavors.contains(CxxDescriptionEnhancer.STATIC_PIC_FLAVOR) ||
        flavors.contains(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR) ||
        flavors.contains(CxxDescriptionEnhancer.HEADER_SYMLINK_TREE_FLAVOR);
  }
}
/*
 * Copyright 2015 Ghent University, Bayer CropScience.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jamesframework.ext.analysis;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import mjson.Json;
import org.jamesframework.core.problems.sol.Solution;

/**
 * Groups results of an analysis performed by an {@link Analysis} object. The results can be accessed directly or
 * written to a JSON file (see {@link #writeJSON(String)} and {@link #writeJSON(String, JsonConverter)}) that can
 * then be loaded into R to be inspected and visualized using the james-analysis R package.
 *
 * @param <SolutionType> solution type of the analyzed problems
 * @author <a href="mailto:herman.debeukelaer@ugent.be">Herman De Beukelaer</a>
 */
public class AnalysisResults<SolutionType extends Solution> {

    // stores results: problem ID -> search ID -> list of search run results
    private final Map<String, Map<String, List<SearchRunResults<SolutionType>>>> results;

    /**
     * Create an empty results object.
     */
    public AnalysisResults() {
        results = new HashMap<>();
    }

    /**
     * Merge the given results into this results object. A copy of the newly added search
     * runs is made.
     *
     * @param otherResults other results to be merged into this results object
     * @return a reference to the updated results object
     */
    public AnalysisResults<SolutionType> merge(AnalysisResults<SolutionType> otherResults){
        otherResults.results.forEach((problemID, searches) ->
            searches.forEach((searchID, runs) ->
                // deep copy each run before registering it here
                runs.forEach(run -> registerSearchRun(problemID, searchID, new SearchRunResults<>(run)))
            )
        );
        return this;
    }

    /**
     * Register results of a search run, specifying the IDs of the problem being solved and the applied search.
     * If no runs have been registered before for this combination of problem and search, new entries are created.
     * Else, this run is appended to the existing runs.
     *
     * @param problemID ID of the problem being solved
     * @param searchID ID of the applied search
     * @param run results of the search run
     */
    public void registerSearchRun(String problemID, String searchID, SearchRunResults<SolutionType> run){
        // lazily create the nested maps/list on first registration
        results.computeIfAbsent(problemID, p -> new HashMap<>())
               .computeIfAbsent(searchID, s -> new ArrayList<>())
               .add(run);
    }

    /**
     * Get the number of analyzed problems.
     *
     * @return number of analyzed problems
     */
    public int getNumProblems(){
        return results.size();
    }

    /**
     * Get the IDs of the analyzed problems (unmodifiable view).
     *
     * @return IDs of analyzed problems
     */
    public Set<String> getProblemIDs(){
        return Collections.unmodifiableSet(results.keySet());
    }

    /**
     * Get the number of different searches that have been applied to solve the problem with the given ID.
     *
     * @param problemID ID of the problem
     * @return number of different searches applied to solve the problem
     * @throws UnknownIDException if an unknown problem ID is given
     */
    public int getNumSearches(String problemID){
        return lookupSearches(problemID).size();
    }

    /**
     * Get the IDs of the different searches that have been applied to solve the problem with the
     * given ID (unmodifiable view).
     *
     * @param problemID ID of the problem
     * @return IDs of different searches applied to solve the problem
     * @throws UnknownIDException if an unknown problem ID is given
     */
    public Set<String> getSearchIDs(String problemID){
        return Collections.unmodifiableSet(lookupSearches(problemID).keySet());
    }

    /**
     * Get the number of performed runs of the given search when solving the given problem.
     *
     * @param problemID ID of the problem
     * @param searchID ID of the applied search
     * @return number of performed runs of the given search when solving the given problem
     * @throws UnknownIDException if an unknown problem or search ID is given
     */
    public int getNumRuns(String problemID, String searchID){
        return lookupRuns(problemID, searchID).size();
    }

    /**
     * Get the results of the i-th performed run of the given search when solving the given problem.
     *
     * @param problemID ID of the problem
     * @param searchID ID of the applied search
     * @param i search run index
     * @return results of i-th run of the given search when solving the given problem
     * @throws UnknownIDException if an unknown problem or search ID is given
     * @throws IndexOutOfBoundsException if there is no i-th run for this search and problem
     */
    public SearchRunResults<SolutionType> getRun(String problemID, String searchID, int i){
        return lookupRuns(problemID, searchID).get(i);
    }

    /**
     * Look up the search map of the given problem, validating the problem ID.
     *
     * @param problemID ID of the problem
     * @return map of search ID to registered runs for this problem
     * @throws UnknownIDException if an unknown problem ID is given
     */
    private Map<String, List<SearchRunResults<SolutionType>>> lookupSearches(String problemID){
        Map<String, List<SearchRunResults<SolutionType>>> searches = results.get(problemID);
        if(searches == null){
            throw new UnknownIDException("Unknown problem ID " + problemID + ".");
        }
        return searches;
    }

    /**
     * Look up the registered runs of the given search for the given problem, validating both IDs.
     *
     * @param problemID ID of the problem
     * @param searchID ID of the applied search
     * @return list of registered runs
     * @throws UnknownIDException if an unknown problem or search ID is given
     */
    private List<SearchRunResults<SolutionType>> lookupRuns(String problemID, String searchID){
        List<SearchRunResults<SolutionType>> runs = lookupSearches(problemID).get(searchID);
        if(runs == null){
            throw new UnknownIDException("Unknown search ID " + searchID + " for problem " + problemID + ".");
        }
        return runs;
    }

    /**
     * Write the results to a JSON file that can be loaded into R to be inspected and visualized using
     * the james-analysis R package. If the specified file already exists, it is overwritten. This method
     * only stores the evaluation values and update times for each search run, skipping the actual best
     * found solutions. If desired to store the solutions as well use {@link #writeJSON(String, JsonConverter)}.
     *
     * @param filePath path of the file to which the JSON output is written
     * @throws IOException if an I/O error occurs when writing to the file
     */
    public void writeJSON(String filePath) throws IOException{
        writeJSON(filePath, null);
    }

    /**
     * Write the results to a JSON file that can be loaded into R to be inspected and visualized using
     * the james-analysis R package. If the specified file already exists, it is overwritten. This method
     * stores the evaluation values, the update times and the actual best found solution for each search
     * run. The solutions are converted to a JSON representation using the given converter. If the latter
     * is <code>null</code>, the actual solutions are not stored in the output file.
     *
     * @param filePath path of the file to which the JSON output is written
     * @param solutionJsonConverter converts solutions to a JSON representation
     * @throws IOException if an I/O error occurs when writing to the file
     */
    public void writeJSON(String filePath, JsonConverter<SolutionType> solutionJsonConverter) throws IOException{

        /**************************************************/
        /* STEP 1: Convert results to JSON representation */
        /**************************************************/

        Json resultsJson = Json.object();
        // register problems
        results.forEach((problemID, searches) -> {
            Json problemJson = Json.object();
            searches.forEach((searchID, runs) -> {
                Json searchJson = Json.array();
                // register search runs
                runs.forEach(run -> {
                    Json runJson = Json.object();
                    // register update times and values
                    Json times = Json.array(run.getTimes().toArray());
                    Json values = Json.array(run.getValues().toArray());
                    runJson.set("times", times);
                    runJson.set("values", values);
                    // register best found solution, if a JSON converter is given
                    if(solutionJsonConverter != null){
                        runJson.set("best.solution", solutionJsonConverter.toJson(run.getBestSolution()));
                    }
                    searchJson.add(runJson);
                });
                problemJson.set(searchID, searchJson);
            });
            resultsJson.set(problemID, problemJson);
        });

        /*************************************/
        /* STEP 2: Write JSON string to file */
        /*************************************/

        Files.write(Paths.get(filePath), Collections.singleton(resultsJson.toString()));

    }

}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2021 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ui.editors.sql;

import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.source.IAnnotationModel;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IWorkbenchPartSite;
import org.eclipse.ui.part.MultiPageEditorSite;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBExecUtils;
import org.jkiss.dbeaver.model.exec.compile.DBCCompileLog;
import org.jkiss.dbeaver.model.exec.compile.DBCSourceHost;
import org.jkiss.dbeaver.model.navigator.DBNDatabaseNode;
import org.jkiss.dbeaver.model.preferences.DBPPropertySource;
import org.jkiss.dbeaver.model.runtime.AbstractJob;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.ui.*;
import org.jkiss.dbeaver.ui.controls.ObjectCompilerLogViewer;
import org.jkiss.dbeaver.ui.controls.ProgressPageControl;
import org.jkiss.dbeaver.ui.editors.DatabaseEditorUtils;
import org.jkiss.dbeaver.ui.editors.IDatabaseEditorInput;
import org.jkiss.dbeaver.ui.editors.IDatabasePostSaveProcessor;
import org.jkiss.dbeaver.ui.editors.entity.EntityEditor;
import org.jkiss.dbeaver.ui.editors.text.BaseTextDocumentProvider;
import org.jkiss.dbeaver.ui.editors.text.DatabaseMarkerAnnotationModel;
import org.jkiss.dbeaver.utils.RuntimeUtils;

import java.lang.reflect.InvocationTargetException;
import java.util.Map;

/**
 * SQLEditorNested
 *
 * Base SQL editor embedded inside a database-object editor (e.g. the source tab of a
 * procedure/view editor). Subclasses supply the object's source text via
 * {@link #getSourceText(DBRProgressMonitor)} / {@link #setSourceText(DBRProgressMonitor, String)}.
 * Optionally hosts a compile log pane (when {@link #getCompileCommandId()} is non-null).
 */
public abstract class SQLEditorNested<T extends DBSObject>
    extends SQLEditorBase
    implements IActiveWorkbenchPart, IRefreshablePart, DBCSourceHost, IDatabasePostSaveProcessor
{
    // Save-context flag preventing a second compile when saving recursively.
    private static final String SAVE_CONTEXT_COMPILE_PARAM = "object.compiled";

    private EditorPageControl pageControl;
    private ObjectCompilerLogViewer compileLog;
    private Control editorControl;
    // Sash splitting the editor (top) from the compile log (bottom); null when no compiler.
    private SashForm editorSash;
    // Set on first activation; guards one-time syntax-rule loading.
    private boolean activated;

    public SQLEditorNested() {
        super();

        setDocumentProvider(new ObjectDocumentProvider());
        //setHasVerticalRuler(false);
    }

    public IDatabaseEditorInput getDatabaseEditorInput() {
        return (IDatabaseEditorInput)super.getEditorInput();
    }

    /** Returns the database object being edited, or null when the input is not a database input. */
    @Override
    public T getSourceObject() {
        IEditorInput editorInput = getEditorInput();
        if (!(editorInput instanceof IDatabaseEditorInput)) {
            return null;
        }
        return (T) ((IDatabaseEditorInput)editorInput).getDatabaseObject();
    }

    /** Returns the execution context of the editor input, or null when unavailable. */
    @Override
    public DBCExecutionContext getExecutionContext() {
        IEditorInput editorInput = getEditorInput();
        if (!(editorInput instanceof IDatabaseEditorInput)) {
            return null;
        }
        return ((IDatabaseEditorInput)editorInput).getExecutionContext();
    }

    public DBPPropertySource getInputPropertySource() {
        return getDatabaseEditorInput().getPropertySource();
    }

    /**
     * Builds the page: progress page control wrapping either the bare editor or an
     * editor/compile-log sash (when a compile command exists). The log pane starts hidden
     * (editor maximized) until {@link #showCompileLog()} is called.
     */
    @Override
    public void createPartControl(Composite parent) {
        pageControl = new EditorPageControl(parent, SWT.SHEET);

        boolean hasCompiler = getCompileCommandId() != null;

        if (hasCompiler) {
            editorSash = new SashForm(pageControl.createContentContainer(), SWT.VERTICAL | SWT.SMOOTH);
            super.createPartControl(editorSash);

            editorControl = editorSash.getChildren()[0];
            compileLog = new ObjectCompilerLogViewer(editorSash, this, false);
        } else {
            super.createPartControl(pageControl.createContentContainer());
        }

        // Create new or substitute progress control
        pageControl.createOrSubstituteProgressPanel(getSite());
        pageControl.setInfo("Source");

        if (hasCompiler) {
            editorSash.setWeights(new int[]{70, 30});
            editorSash.setMaximizedControl(editorControl);
        }

        // Use focus to activate page control
        final Control editorControl = getEditorControl();
        assert editorControl != null;
        editorControl.addFocusListener(new FocusListener() {
            @Override
            public void focusGained(FocusEvent e) {
                if (pageControl != null && !pageControl.isDisposed()) {
                    pageControl.activate(true);
                }
            }

            @Override
            public void focusLost(FocusEvent e) {
                if (pageControl != null && !pageControl.isDisposed()) {
                    pageControl.activate(false);
                }
            }
        });
    }

    /** Saving must happen on the UI thread; delegates to the base editor save. */
    @Override
    public void doSave(final IProgressMonitor progressMonitor) {
        UIUtils.syncExec(() -> SQLEditorNested.super.doSave(progressMonitor));
    }

    /** After save, runs the compile command once (flag in context prevents re-entry). */
    @Override
    public void runPostSaveCommands(Map<String, Object> context) {
        String compileCommandId = getCompileCommandId();
        if (compileCommandId != null && context.get(SAVE_CONTEXT_COMPILE_PARAM) == null) {
            // Compile after save
            try {
                ActionUtils.runCommand(compileCommandId, getSite().getWorkbenchWindow());
            } finally {
                context.put(SAVE_CONTEXT_COMPILE_PARAM, true);
            }
        }
    }

    @Override
    public void activatePart() {
        if (!activated) {
            reloadSyntaxRules();
            activated = true;
        }
    }

    @Override
    public void deactivatePart() {
    }

    /**
     * Refreshes the editor content. Skipped while the enclosing entity editor is saving
     * (source text was already updated during save). On force refresh, resets the cached
     * source and re-sets the input, trying to keep the caret position.
     */
    @Override
    public RefreshResult refreshPart(Object source, boolean force) {
        // Check if we are in saving process
        // If so then no refresh needed (source text was updated during save)
        IEditorSite editorSite = getEditorSite();
        if (editorSite instanceof MultiPageEditorSite &&
            ((MultiPageEditorSite) editorSite).getMultiPageEditor() instanceof EntityEditor &&
            ((EntityEditor) ((MultiPageEditorSite) editorSite).getMultiPageEditor()).isSaveInProgress())
        {
            return RefreshResult.IGNORED;
        }

        final IDocumentProvider documentProvider = getDocumentProvider();
        if (documentProvider instanceof SQLEditorNested.ObjectDocumentProvider) {
            // Drop cached source so it is re-read on next document creation.
            ((SQLEditorNested.ObjectDocumentProvider) documentProvider).sourceText = null;
        }
        if (force) {
            int caretOffset = getEditorControl().getCaretOffset();
            super.setInput(getEditorInput());
            // Try to keep cursor position
            if (caretOffset < getEditorControl().getCharCount()) {
                getEditorControl().setCaretOffset(caretOffset);
            }
        }
        reloadSyntaxRules();
        return RefreshResult.REFRESHED;
    }

    /** Command id used to compile the object after save; null means no compiler (no log pane). */
    protected String getCompileCommandId() {
        return null;
    }

    /** True once the real source (not the loading placeholder) has been loaded. */
    public boolean isDocumentLoaded() {
        final IDocumentProvider documentProvider = getDocumentProvider();
        if (documentProvider instanceof SQLEditorNested.ObjectDocumentProvider) {
            return ((SQLEditorNested.ObjectDocumentProvider) documentProvider).sourceLoaded;
        }
        return true;
    }

    /**
     * Document provider that loads object source asynchronously: the document is first filled
     * with a "Loading..." placeholder, then a background job fetches the real source and
     * re-sets the editor input on the UI thread when done.
     */
    private class ObjectDocumentProvider extends BaseTextDocumentProvider {

        // Cached source text; null triggers an async (re)load in createDocument().
        private String sourceText;
        private boolean sourceLoaded;

        @Override
        public boolean isReadOnly(Object element) {
            return SQLEditorNested.this.isReadOnly();
        }

        @Override
        public boolean isModifiable(Object element) {
            return !SQLEditorNested.this.isReadOnly();
        }

        @Override
        protected IDocument createDocument(Object element) throws CoreException {
            final Document document = new Document();
            if (sourceText == null) {
                sourceText = SQLUtils.generateCommentLine(
                    getDataSource(),
                    "Loading '" + getEditorInput().getName() + "' source...");
                document.set(sourceText);

                AbstractJob job = new AbstractJob("Load SQL source") {
                    {
                        setUser(true);
                    }

                    @Override
                    protected IStatus run(DBRProgressMonitor monitor) {
                        monitor.beginTask(getName(), 1);
                        try {
                            // Retry-on-failure wrapper around the actual source fetch.
                            DBExecUtils.tryExecuteRecover(monitor, getDataSource(), param -> {
                                try {
                                    sourceText = getSourceText(monitor);
                                    if (sourceText == null) {
                                        sourceText = SQLUtils.generateCommentLine(getDataSource(), "Empty source");
                                    }
                                } catch (DBException e) {
                                    throw new InvocationTargetException(e);
                                }
                            });
                            return Status.OK_STATUS;
                        } catch (Exception e) {
                            log.error(e);
                            // Surface the error inside the document itself.
                            sourceText = "/* ERROR WHILE READING SOURCE:\n\n" + e.getMessage() + "\n*/";
                            return Status.CANCEL_STATUS;
                        } finally {
                            monitor.done();
                        }
                    }
                };
                job.addJobChangeListener(new JobChangeAdapter() {
                    @Override
                    public void done(IJobChangeEvent event) {
                        // Re-set input on the UI thread so the loaded source replaces the placeholder.
                        UIUtils.asyncExec(() -> {
                            SQLEditorNested.this.setInput(getEditorInput());
                            SQLEditorNested.this.reloadSyntaxRules();
                        });
                        super.done(event);
                    }
                });
                job.schedule();
            }
            // Set text
            document.set(sourceText);
            sourceLoaded = true;

            return document;
        }

        @Override
        protected IAnnotationModel createAnnotationModel(Object element) throws CoreException {
            // Use a marker-backed annotation model when the object maps to a workspace resource.
            DBSObject databaseObject = getSourceObject();
            DBNDatabaseNode node = DBWorkbench.getPlatform().getNavigatorModel().getNodeByObject(databaseObject);
            IResource resource = node == null || node.getOwnerProject() == null ?
                null : node.getOwnerProject().getEclipseProject();
            if (resource != null) {
                return new DatabaseMarkerAnnotationModel(databaseObject, node, resource);
            }
            return super.createAnnotationModel(element);
        }

        @Override
        protected void doSaveDocument(IProgressMonitor monitor, Object element, IDocument document, boolean overwrite) throws CoreException {
            DBRProgressMonitor pm = RuntimeUtils.makeMonitor(monitor);
            pm.beginTask("Save nested editor", 1);
            try {
                setSourceText(pm, document.get());
            } finally {
                pm.done();
            }
        }
    }

    @Override
    public DBCCompileLog getCompileLog()
    {
        return compileLog;
    }

    @Override
    public void setCompileInfo(String message, boolean error)
    {
        pageControl.setInfo(message);
    }

    /** Moves the caret/selection to the given 1-based line and column (used by compile errors). */
    @Override
    public void positionSource(int line, int position)
    {
        try {
            final IRegion lineInfo = getTextViewer().getDocument().getLineInformation(line - 1);
            final int offset = lineInfo.getOffset() + position - 1;
            super.selectAndReveal(offset, 1);
            //textEditor.setFocus();
        } catch (BadLocationException e) {
            log.warn(e);
            // do nothing
        }
    }

    /** Un-maximizes the editor in the sash so the compile log pane becomes visible. */
    @Override
    public void showCompileLog()
    {
        editorSash.setMaximizedControl(null);
        compileLog.layoutLog();
    }

    /** Fetches the object's source text from the database. */
    protected abstract String getSourceText(DBRProgressMonitor monitor)
        throws DBException;

    /** Persists the edited source text back to the object. */
    protected abstract void setSourceText(DBRProgressMonitor monitor, String sourceText);

    /** Adds open/save and (when available) compile + view-log actions to the toolbar/menu. */
    protected void contributeEditorCommands(IContributionManager toolBarManager)
    {
        toolBarManager.add(ActionUtils.makeCommandContribution(getSite().getWorkbenchWindow(), SQLEditorCommands.CMD_OPEN_FILE));
        toolBarManager.add(ActionUtils.makeCommandContribution(getSite().getWorkbenchWindow(), SQLEditorCommands.CMD_SAVE_FILE));

        String compileCommandId = getCompileCommandId();
        if (compileCommandId != null) {
            toolBarManager.add(new Separator());
            toolBarManager.add(ActionUtils.makeCommandContribution(getSite().getWorkbenchWindow(), compileCommandId));
            toolBarManager.add(new ViewLogAction());
        }
    }

    @Override
    public void editorContextMenuAboutToShow(IMenuManager menu)
    {
        super.editorContextMenuAboutToShow(menu);

        menu.add(new Separator());
        contributeEditorCommands(menu);
    }

    @Override
    public void doSaveAs()
    {
        saveToExternalFile();
    }

    /** Progress page control hosting the editor; contributes the editor's command buttons. */
    private class EditorPageControl extends ProgressPageControl {

        EditorPageControl(Composite parent, int style)
        {
            super(parent, style);
        }

        @Override
        public void fillCustomActions(IContributionManager contributionManager) {
            contributeEditorCommands(contributionManager);

            IWorkbenchPartSite site = getSite();
            if (site != null) {
                DatabaseEditorUtils.contributeStandardEditorActions(site, contributionManager);
            }
        }
    }

    /** Toolbar action toggling between the maximized editor and the compile log pane. */
    public class ViewLogAction extends Action
    {
        ViewLogAction()
        {
            super("View compile log", DBeaverIcons.getImageDescriptor(UIIcon.COMPILE_LOG)); //$NON-NLS-2$
        }

        @Override
        public void run()
        {
            if (getTextViewer().getControl().isDisposed()) {
                return;
            }
            if (editorSash.getMaximizedControl() == null) {
                editorSash.setMaximizedControl(editorControl);
            } else {
                showCompileLog();
            }
        }
    }
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.server.ft;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;

import alluxio.AlluxioURI;
import alluxio.Configuration;
import alluxio.Constants;
import alluxio.PropertyKey;
import alluxio.client.WriteType;
import alluxio.client.block.AlluxioBlockStore;
import alluxio.client.file.FileSystem;
import alluxio.client.file.FileSystemTestUtils;
import alluxio.client.file.URIStatus;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.collections.Pair;
import alluxio.exception.AlluxioException;
import alluxio.hadoop.HadoopClientTestUtils;
import alluxio.master.MultiMasterLocalAlluxioCluster;
import alluxio.master.block.BlockMaster;
import alluxio.testutils.BaseIntegrationTest;
import alluxio.thrift.CommandType;
import alluxio.thrift.RegisterWorkerTOptions;
import alluxio.util.CommonUtils;
import alluxio.util.WaitForOptions;
import alluxio.util.io.PathUtils;

import jersey.repackaged.com.google.common.collect.Lists;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeoutException;

/**
 * Integration tests for master fault tolerance: files created through one leader must stay
 * visible after leader failover, and workers must re-register with the new leader.
 */
@Ignore("https://alluxio.atlassian.net/browse/ALLUXIO-2818")
public class MasterFaultToleranceIntegrationTest extends BaseIntegrationTest {
  // Fail if the cluster doesn't come up after this amount of time.
  private static final int CLUSTER_WAIT_TIMEOUT_MS = 120 * Constants.SECOND_MS;
  private static final long WORKER_CAPACITY_BYTES = 10000;
  private static final int BLOCK_SIZE = 30;
  // Number of masters in the fault-tolerant cluster; tests kill up to MASTERS - 1 of them.
  private static final int MASTERS = 5;

  private MultiMasterLocalAlluxioCluster mMultiMasterLocalAlluxioCluster = null;
  private FileSystem mFileSystem = null;

  @BeforeClass
  public static void beforeClass() {
    // Skip hadoop 1 because hadoop 1's RPC cannot be interrupted properly which makes it
    // hard to shutdown a cluster.
    // TODO(peis): Figure out a better way to support hadoop 1.
    Assume.assumeFalse(HadoopClientTestUtils.isHadoop1x());
  }

  @After
  public final void after() throws Exception {
    mMultiMasterLocalAlluxioCluster.stop();
  }

  @Before
  public final void before() throws Exception {
    // TODO(gpang): Implement multi-master cluster as a resource.
    mMultiMasterLocalAlluxioCluster = new MultiMasterLocalAlluxioCluster(MASTERS);
    mMultiMasterLocalAlluxioCluster.initConfiguration();
    // Small journal sizes force frequent checkpoint/log rotation so failover is exercised.
    Configuration.set(PropertyKey.WORKER_MEMORY_SIZE, WORKER_CAPACITY_BYTES);
    Configuration.set(PropertyKey.USER_BLOCK_SIZE_BYTES_DEFAULT, BLOCK_SIZE);
    Configuration.set(PropertyKey.MASTER_JOURNAL_TAILER_SHUTDOWN_QUIET_WAIT_TIME_MS, 100);
    Configuration.set(PropertyKey.MASTER_JOURNAL_CHECKPOINT_PERIOD_ENTRIES, 2);
    Configuration.set(PropertyKey.MASTER_JOURNAL_LOG_SIZE_BYTES_MAX, 32);
    mMultiMasterLocalAlluxioCluster.start();
    mFileSystem = mMultiMasterLocalAlluxioCluster.getClient();
  }

  /**
   * Creates 10 files in the folder.
   *
   * @param folderName the folder name to create
   * @param answer the results, the mapping from file id to file path
   */
  private void faultTestDataCreation(AlluxioURI folderName, List<Pair<Long, AlluxioURI>> answer)
      throws IOException, AlluxioException {
    mFileSystem.createDirectory(folderName);
    answer.add(new Pair<>(mFileSystem.getStatus(folderName).getFileId(), folderName));

    for (int k = 0; k < 10; k++) {
      AlluxioURI path =
          new AlluxioURI(PathUtils.concatPath(folderName, folderName.toString().substring(1) + k));
      mFileSystem.createFile(path).close();
      answer.add(new Pair<>(mFileSystem.getStatus(path).getFileId(), path));
    }
  }

  /**
   * Tells if the results can match the answers.
   *
   * @param answers the correct results
   */
  private void faultTestDataCheck(List<Pair<Long, AlluxioURI>> answers)
      throws IOException, AlluxioException {
    List<String> files = FileSystemTestUtils.listFiles(mFileSystem, AlluxioURI.SEPARATOR);
    Collections.sort(files);
    assertEquals(answers.size(), files.size());
    // Both path and file id must survive the failover for every recorded entry.
    for (Pair<Long, AlluxioURI> answer : answers) {
      assertEquals(answer.getSecond().toString(),
          mFileSystem.getStatus(answer.getSecond()).getPath());
      assertEquals(answer.getFirst().longValue(),
          mFileSystem.getStatus(answer.getSecond()).getFileId());
    }
  }

  /**
   * Wait for a number of workers to register. This call will block until the block master
   * detects the required number of workers or if the timeout is exceeded.
   *
   * @param store the block store object which references the correct block master
   * @param numWorkers the number of workers to wait for
   * @param timeoutMs the number of milliseconds to wait before timing out
   */
  private void waitForWorkerRegistration(final AlluxioBlockStore store, final int numWorkers,
      int timeoutMs) throws TimeoutException, InterruptedException {
    CommonUtils.waitFor("Worker to register.", () -> {
      try {
        return store.getEligibleWorkers().size() >= numWorkers;
      } catch (Exception e) {
        // Treat transient RPC failures during failover as "not registered yet".
        return false;
      }
    }, WaitForOptions.defaults().setTimeoutMs(timeoutMs));
  }

  /** Data created before each leader kill must remain intact after failover. */
  @Test
  public void createFileFault() throws Exception {
    int clients = 10;
    List<Pair<Long, AlluxioURI>> answer = new ArrayList<>();
    for (int k = 0; k < clients; k++) {
      faultTestDataCreation(new AlluxioURI("/data" + k), answer);
    }
    faultTestDataCheck(answer);

    for (int kills = 0; kills < MASTERS - 1; kills++) {
      assertTrue(mMultiMasterLocalAlluxioCluster.stopLeader());
      mMultiMasterLocalAlluxioCluster.waitForNewMaster(CLUSTER_WAIT_TIMEOUT_MS);
      waitForWorkerRegistration(AlluxioBlockStore.create(), 1, CLUSTER_WAIT_TIMEOUT_MS);
      faultTestDataCheck(answer);
      faultTestDataCreation(new AlluxioURI("/data_kills_" + kills), answer);
    }
  }

  @Test
  public void deleteFileFault() throws Exception {
    // Kill leader -> create files -> kill leader -> delete files, repeat.
    List<Pair<Long, AlluxioURI>> answer = new ArrayList<>();
    for (int kills = 0; kills < MASTERS - 1; kills++) {
      assertTrue(mMultiMasterLocalAlluxioCluster.stopLeader());
      mMultiMasterLocalAlluxioCluster.waitForNewMaster(CLUSTER_WAIT_TIMEOUT_MS);
      waitForWorkerRegistration(AlluxioBlockStore.create(), 1, CLUSTER_WAIT_TIMEOUT_MS);
      if (kills % 2 != 0) {
        // Delete files.

        faultTestDataCheck(answer);

        // We can not call mFileSystem.delete(mFileSystem.open(new
        // AlluxioURI(AlluxioURI.SEPARATOR))) because root node can not be deleted.
        for (URIStatus file : mFileSystem.listStatus(new AlluxioURI(AlluxioURI.SEPARATOR))) {
          mFileSystem.delete(new AlluxioURI(file.getPath()),
              DeleteOptions.defaults().setRecursive(true));
        }
        answer.clear();
        faultTestDataCheck(answer);
      } else {
        // Create files.

        assertEquals(0, answer.size());
        faultTestDataCheck(answer);

        faultTestDataCreation(new AlluxioURI(PathUtils.concatPath(
            AlluxioURI.SEPARATOR, "data_" + kills)), answer);
        faultTestDataCheck(answer);
      }
    }
  }

  /** Sanity check: THROUGH-written files are all listed with the expected names. */
  @Test
  public void createFiles() throws Exception {
    int clients = 10;
    CreateFileOptions option =
        CreateFileOptions.defaults().setBlockSizeBytes(1024).setWriteType(WriteType.THROUGH);
    for (int k = 0; k < clients; k++) {
      mFileSystem.createFile(new AlluxioURI(AlluxioURI.SEPARATOR + k), option).close();
    }
    List<String> files = FileSystemTestUtils.listFiles(mFileSystem, AlluxioURI.SEPARATOR);
    assertEquals(clients, files.size());
    Collections.sort(files);
    for (int k = 0; k < clients; k++) {
      assertEquals(AlluxioURI.SEPARATOR + k, files.get(k));
    }
  }

  @Test
  public void killStandby() throws Exception {
    // If standby masters are killed(or node failure), current leader should not be affected and the
    // cluster should run properly.

    int leaderIndex = mMultiMasterLocalAlluxioCluster.getLeaderIndex();
    assertNotEquals(-1, leaderIndex);

    List<Pair<Long, AlluxioURI>> answer = new ArrayList<>();
    for (int k = 0; k < 5; k++) {
      faultTestDataCreation(new AlluxioURI("/data" + k), answer);
    }
    faultTestDataCheck(answer);

    for (int kills = 0; kills < MASTERS - 1; kills++) {
      assertTrue(mMultiMasterLocalAlluxioCluster.stopStandby());
      CommonUtils.sleepMs(Constants.SECOND_MS * 2);

      // Leader should not change.
      assertEquals(leaderIndex, mMultiMasterLocalAlluxioCluster.getLeaderIndex());
      // Cluster should still work.
      faultTestDataCheck(answer);
      faultTestDataCreation(new AlluxioURI("/data_kills_" + kills), answer);
    }
  }

  @Test
  public void workerReRegister() throws Exception {
    AlluxioBlockStore store = AlluxioBlockStore.create();
    assertEquals(WORKER_CAPACITY_BYTES, store.getCapacityBytes());

    for (int kills = 0; kills < MASTERS - 1; kills++) {
      assertTrue(mMultiMasterLocalAlluxioCluster.stopLeader());
      mMultiMasterLocalAlluxioCluster.waitForNewMaster(CLUSTER_WAIT_TIMEOUT_MS);
      waitForWorkerRegistration(store, 1, 1 * Constants.MINUTE_MS);
      // If worker is successfully re-registered, the capacity bytes should not change.
      long capacityFound = store.getCapacityBytes();
      assertEquals(WORKER_CAPACITY_BYTES, capacityFound);
    }
  }

  @Test
  public void failoverWorkerRegister() throws Exception {
    // Stop the default cluster.
    after();

    // Create a new cluster, with no workers initially
    final MultiMasterLocalAlluxioCluster cluster = new MultiMasterLocalAlluxioCluster(2, 0);
    cluster.initConfiguration();
    cluster.start();
    try {
      // Get the first block master
      BlockMaster blockMaster1 =
          cluster.getLocalAlluxioMaster().getMasterProcess().getMaster(BlockMaster.class);
      // Register worker 1
      long workerId1a =
          blockMaster1.getWorkerId(new alluxio.wire.WorkerNetAddress().setHost("host1"));
      blockMaster1.workerRegister(workerId1a, Collections.EMPTY_LIST, Collections.EMPTY_MAP,
          Collections.EMPTY_MAP, Collections.EMPTY_MAP, new RegisterWorkerTOptions());

      // Register worker 2
      long workerId2a =
          blockMaster1.getWorkerId(new alluxio.wire.WorkerNetAddress().setHost("host2"));
      blockMaster1.workerRegister(workerId2a, Collections.EMPTY_LIST, Collections.EMPTY_MAP,
          Collections.EMPTY_MAP, Collections.EMPTY_MAP, new RegisterWorkerTOptions());

      assertEquals(2, blockMaster1.getWorkerCount());
      // Worker heartbeats should return "Nothing"
      assertEquals(CommandType.Nothing, blockMaster1
          .workerHeartbeat(workerId1a, Collections.EMPTY_MAP, Collections.EMPTY_LIST,
              Collections.EMPTY_MAP, Lists.newArrayList()).getCommandType());
      assertEquals(CommandType.Nothing, blockMaster1
          .workerHeartbeat(workerId2a, Collections.EMPTY_MAP, Collections.EMPTY_LIST,
              Collections.EMPTY_MAP, Lists.newArrayList()).getCommandType());

      assertTrue(cluster.stopLeader());
      cluster.waitForNewMaster(CLUSTER_WAIT_TIMEOUT_MS);

      // Get the new block master, after the failover
      BlockMaster blockMaster2 = cluster.getLocalAlluxioMaster().getMasterProcess()
          .getMaster(BlockMaster.class);

      // Worker 2 tries to heartbeat (with original id), and should get "Register" in response.
      assertEquals(CommandType.Register, blockMaster2
          .workerHeartbeat(workerId2a, Collections.EMPTY_MAP, Collections.EMPTY_LIST,
              Collections.EMPTY_MAP, Lists.newArrayList()).getCommandType());

      // Worker 2 re-registers (and gets a new worker id)
      long workerId2b =
          blockMaster2.getWorkerId(new alluxio.wire.WorkerNetAddress().setHost("host2"));
      blockMaster2.workerRegister(workerId2b, Collections.EMPTY_LIST, Collections.EMPTY_MAP,
          Collections.EMPTY_MAP, Collections.EMPTY_MAP, new RegisterWorkerTOptions());

      // Worker 1 tries to heartbeat (with original id), and should get "Register" in response.
      assertEquals(CommandType.Register, blockMaster2
          .workerHeartbeat(workerId1a, Collections.EMPTY_MAP, Collections.EMPTY_LIST,
              Collections.EMPTY_MAP, Lists.newArrayList()).getCommandType());

      // Worker 1 re-registers (and gets a new worker id)
      long workerId1b =
          blockMaster2.getWorkerId(new alluxio.wire.WorkerNetAddress().setHost("host1"));
      blockMaster2.workerRegister(workerId1b, Collections.EMPTY_LIST, Collections.EMPTY_MAP,
          Collections.EMPTY_MAP, Collections.EMPTY_MAP, new RegisterWorkerTOptions());
    } finally {
      cluster.stop();
    }

    // Start the default cluster.
    before();
  }
}
/*
 * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.carbon.identity.mgt;

import org.apache.axis2.context.MessageContext;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.AbstractIdentityUserOperationEventListener;
import org.wso2.carbon.identity.core.model.IdentityErrorMsgContext;
import org.wso2.carbon.identity.core.util.IdentityCoreConstants;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.mgt.beans.UserIdentityMgtBean;
import org.wso2.carbon.identity.mgt.beans.VerificationBean;
import org.wso2.carbon.identity.mgt.config.Config;
import org.wso2.carbon.identity.mgt.config.ConfigBuilder;
import org.wso2.carbon.identity.mgt.config.ConfigType;
import org.wso2.carbon.identity.mgt.config.StorageType;
import org.wso2.carbon.identity.mgt.constants.IdentityMgtConstants;
import org.wso2.carbon.identity.mgt.dto.NotificationDataDTO;
import org.wso2.carbon.identity.mgt.dto.UserDTO;
import org.wso2.carbon.identity.mgt.dto.UserIdentityClaimsDO;
import org.wso2.carbon.identity.mgt.dto.UserRecoveryDTO;
import org.wso2.carbon.identity.mgt.dto.UserRecoveryDataDO;
import org.wso2.carbon.identity.mgt.internal.IdentityMgtServiceComponent;
import org.wso2.carbon.identity.mgt.mail.Notification;
import org.wso2.carbon.identity.mgt.mail.NotificationBuilder;
import org.wso2.carbon.identity.mgt.mail.NotificationData;
import org.wso2.carbon.identity.mgt.mail.TransportHeader;
import org.wso2.carbon.identity.mgt.policy.PolicyRegistry;
import org.wso2.carbon.identity.mgt.policy.PolicyViolationException;
import org.wso2.carbon.identity.mgt.store.UserIdentityDataStore;
import org.wso2.carbon.identity.mgt.util.UserIdentityManagementUtil;
import org.wso2.carbon.identity.mgt.util.Utils;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * This is an implementation of UserOperationEventListener. This defines
 * additional operations
 * for some of the core user management operations
 */
public class IdentityMgtEventListener extends AbstractIdentityUserOperationEventListener {

    /*
     * Thread local variables (see IdentityUtil.threadLocalProperties) are used to pass
     * data from doPreX() methods to the matching doPostX() and to avoid infinite loops
     * when a listener method indirectly triggers itself.
     */
    private static final Log log = LogFactory.getLog(IdentityMgtEventListener.class);
    private static final String EMPTY_PASSWORD_USED = "EmptyPasswordUsed";
    private static final String USER_IDENTITY_DO = "UserIdentityDO";
    private static final String EMAIL_NOTIFICATION_TYPE = "EMAIL";
    private static final String UNLOCK_ADMIN_SYS_PROP = "unlockAdmin";
    private static final String PASSWORD_INVALID = "PasswordInvalid";
    PolicyRegistry policyRegistry = null;
    // Backing store for per-user identity claims (lock state, fail attempts, etc.).
    private UserIdentityDataStore module;
    private IdentityMgtConfig identityMgtConfig;
    // Set of thread local variable names
    private static final String DO_PRE_AUTHENTICATE = "doPreAuthenticate";
    private static final String DO_POST_AUTHENTICATE = "doPostAuthenticate";
    private static final String DO_POST_ADD_USER = "doPostAddUser";
    private static final String DO_PRE_SET_USER_CLAIM_VALUES = "doPreSetUserClaimValues";
    private static final String DO_PRE_UPDATE_CREDENTIAL_BY_ADMIN = "doPreUpdateCredentialByAdmin";
    private static final String DO_PRE_UPDATE_CREDENTIAL = "doPreUpdateCredential";
    private static final String DO_POST_UPDATE_CREDENTIAL = "doPostUpdateCredential";
    private static final String ASK_PASSWORD_FEATURE_IS_DISABLED = "Ask Password Feature is disabled";
    private static final String INVALID_OPERATION = "InvalidOperation";

    public IdentityMgtEventListener() {

        identityMgtConfig = IdentityMgtConfig.getInstance();
        // Get the policy registry with the loaded policies.
        policyRegistry = identityMgtConfig.getPolicyRegistry();
        module = IdentityMgtConfig.getInstance().getIdentityDataStore();
        String isAdminUnlockSysProp = System.getProperty(UNLOCK_ADMIN_SYS_PROP);
        // If the system property unlockAdmin is set, then admin account will be unlocked
        if (StringUtils.isNotBlank(isAdminUnlockSysProp) && Boolean.parseBoolean(isAdminUnlockSysProp)) {
            log.info("unlockAdmin system property is defined. Hence unlocking admin account");
            unlockAdmin();
        }
    }

    /**
     * This method will unlock the admin account
     */
    private void unlockAdmin() {
        String adminUserName =
                IdentityMgtServiceComponent.getRealmService().getBootstrapRealmConfiguration().getAdminUserName();
        try {
            if (isEnable()) {
                UserStoreManager userStoreMng = IdentityMgtServiceComponent.getRealmService()
                        .getBootstrapRealm().getUserStoreManager();
                Map<String, String> claimMap = new HashMap<String, String>();
                claimMap.put(UserIdentityDataStore.ACCOUNT_LOCK, Boolean.toString(false));
                claimMap.put(UserIdentityDataStore.ACCOUNT_DISABLED, Boolean.toString(false));
                // The "do" method of this listener is called directly because at the time of this
                // execution, this listener (or any other listener) may not have been registered yet.
                doPreSetUserClaimValues(adminUserName, claimMap, null, userStoreMng);
            }
        } catch (UserStoreException e) {
            log.error("Error while unlocking admin account", e);
        }
    }

    /**
     * Returns the listener's execution order: the configured order id if one is set,
     * otherwise the default of 50.
     */
    @Override
    public int getExecutionOrderId() {
        int orderId = getOrderId();
        if (orderId != IdentityCoreConstants.EVENT_LISTENER_ORDER_ID) {
            return orderId;
        }
        return 50;
    }

    /**
     * This method checks if the user account exist or is locked. If the account is
     * locked, the authentication process will be terminated after this method
     * returning false.
     */
    @Override
    public boolean doPreAuthenticate(String userName, Object credential,
                                     UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        // Top level try and finally blocks are used to unset thread local variables
        try {
            // Guard against re-entrant invocation of this listener method.
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_PRE_AUTHENTICATE)) {
                IdentityUtil.threadLocalProperties.get().put(DO_PRE_AUTHENTICATE, true);
                if (log.isDebugEnabled()) {
                    log.debug("Pre authenticator is called in IdentityMgtEventListener");
                }
                IdentityUtil.clearIdentityErrorMsg();
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                if (!config.isEnableAuthPolicy()) {
                    return true;
                }
                String domainName = userStoreManager.getRealmConfiguration().getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                String usernameWithDomain = UserCoreUtil.addDomainToName(userName, domainName);
                boolean isUserExistInCurrentDomain = userStoreManager.isExistingUser(usernameWithDomain);
                if (!isUserExistInCurrentDomain) {
                    IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                    IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                    if (log.isDebugEnabled()) {
                        log.debug("Username :" + userName + "does not exists in the system, ErrorCode :" + UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                    }
                    if (config.isAuthPolicyAccountExistCheck()) {
                        throw new UserStoreException(UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                    }
                } else {
                    UserIdentityClaimsDO userIdentityDTO = module.load(userName, userStoreManager);
                    if (userIdentityDTO == null) {
                        return true;
                    }
                    // If account is disabled, user should not be able to log in
                    if (userIdentityDTO.getIsAccountDisabled()) {
                        IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(
                                IdentityCoreConstants.USER_ACCOUNT_DISABLED);
                        IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                        String errorMsg = "User account is disabled for user : " + userName;
                        log.warn(errorMsg);
                        throw new UserStoreException(IdentityCoreConstants.USER_ACCOUNT_DISABLED_ERROR_CODE + " "
                                + errorMsg);
                    }
                    // if the account is locked, should not be able to log in
                    if (userIdentityDTO.isAccountLocked()) {
                        // If unlock time is specified then unlock the account.
                        if ((userIdentityDTO.getUnlockTime() != 0) && (System.currentTimeMillis() >= userIdentityDTO.getUnlockTime())) {
                            userIdentityDTO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON, "");
                            userIdentityDTO.setAccountLock(false);
                            userIdentityDTO.setUnlockTime(0);
                            try {
                                module.store(userIdentityDTO, userStoreManager);
                            } catch (IdentityException e) {
                                throw new UserStoreException(
                                        "Error while saving user store data for user : " + userName, e);
                            }
                        } else {
                            IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(
                                    UserCoreConstants.ErrorCode.USER_IS_LOCKED + ":"
                                            + userIdentityDTO.getUserDataMap().get(UserIdentityDataStore.ACCOUNT_LOCKED_REASON),
                                    userIdentityDTO.getFailAttempts(), config.getAuthPolicyMaxLoginAttempts());
                            if (IdentityMgtConstants.LockedReason.MAX_ATTEMTS_EXCEEDED.toString().equals(userIdentityDTO
                                    .getUserDataMap().get(UserIdentityDataStore.ACCOUNT_LOCKED_REASON))) {
                                customErrorMessageContext
                                        .setFailedLoginAttempts(config.getAuthPolicyMaxLoginAttempts());
                            }
                            IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                            String errorMsg = "User account is locked for user : " + userName
                                    + ". cannot login until the account is unlocked ";
                            log.warn(errorMsg);
                            throw new UserStoreException(UserCoreConstants.ErrorCode.USER_IS_LOCKED + " "
                                    + errorMsg);
                        }
                    }
                }
            }
            return true;
        } finally {
            // remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_PRE_AUTHENTICATE);
        }
    }

    /**
     * This method locks the accounts after a configured number of
     * authentication failure attempts. And unlocks accounts based on successful
     * authentications.
     */
    @Override
    public boolean doPostAuthenticate(String userName, boolean authenticated,
                                      UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        IdentityUtil.threadLocalProperties.get().remove(IdentityCoreConstants.USER_ACCOUNT_STATE);
        String domainName = userStoreManager.getRealmConfiguration().getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
        String usernameWithDomain = IdentityUtil.addDomainToName(userName, domainName);
        boolean isUserExistInCurrentDomain = userStoreManager.isExistingUser(usernameWithDomain);
        // Record last-logon time for successful authentications.
        if (authenticated && isUserExistInCurrentDomain) {
            if (isUserExistInCurrentDomain) {
                UserIdentityClaimsDO userIdentityDTO = module.load(userName, userStoreManager);
                userIdentityDTO.setLastLogonTime(System.currentTimeMillis());
                try {
                    module.store(userIdentityDTO, userStoreManager);
                } catch (IdentityException e) {
                    throw new UserStoreException(String.format("Error while saving user store data : %s for user : %s.",
                            UserIdentityDataStore.LAST_LOGON_TIME, userName), e);
                }
            }
        }
        // Top level try and finally blocks are used to unset thread local variables
        try {
            // Guard against re-entrant invocation of this listener method.
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_POST_AUTHENTICATE)) {
                IdentityUtil.threadLocalProperties.get().put(DO_POST_AUTHENTICATE, true);
                if (log.isDebugEnabled()) {
                    log.debug("Post authenticator is called in IdentityMgtEventListener");
                }
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                if (!config.isEnableAuthPolicy()) {
                    return true;
                }
                UserIdentityClaimsDO userIdentityDTO = module.load(userName, userStoreManager);
                if (userIdentityDTO == null) {
                    userIdentityDTO = new UserIdentityClaimsDO(userName);
                    userIdentityDTO.setTenantId(userStoreManager.getTenantId());
                }
                boolean userOTPEnabled = userIdentityDTO.getOneTimeLogin();
                // One time password check
                if (authenticated && config.isAuthPolicyOneTimePasswordCheck() &&
                        (!userStoreManager.isReadOnly()) && userOTPEnabled) {
                    // reset password of the user and notify user of the new password
                    String password = new String(UserIdentityManagementUtil.generateTemporaryPassword());
                    userStoreManager.updateCredentialByAdmin(userName, password);
                    // Get email user claim value
                    String email = userStoreManager.getUserClaimValue(userName,
                            UserCoreConstants.ClaimTypeURIs.EMAIL_ADDRESS, null);
                    if (StringUtils.isBlank(email)) {
                        throw new UserStoreException("No user email provided for user : " + userName);
                    }
                    List<NotificationSendingModule> notificationModules = config.getNotificationSendingModules();
                    if (notificationModules != null) {
                        NotificationDataDTO notificationData = new NotificationDataDTO();
                        // Propagate the incoming transport headers to the outgoing notification.
                        if (MessageContext.getCurrentMessageContext() != null &&
                                MessageContext.getCurrentMessageContext().getProperty(
                                        MessageContext.TRANSPORT_HEADERS) != null) {
                            Map<String, String> transportHeaderMap = (Map) MessageContext.getCurrentMessageContext()
                                    .getProperty(MessageContext.TRANSPORT_HEADERS);
                            if (MapUtils.isNotEmpty(transportHeaderMap)) {
                                TransportHeader[] transportHeadersArray = new TransportHeader[transportHeaderMap.size()];
                                int i = 0;
                                for (Map.Entry<String, String> entry : transportHeaderMap.entrySet()) {
                                    TransportHeader transportHeader = new TransportHeader();
                                    transportHeader.setHeaderName(entry.getKey());
                                    transportHeader.setHeaderValue(entry.getValue());
                                    transportHeadersArray[i] = transportHeader;
                                    ++i;
                                }
                                notificationData.setTransportHeaders(transportHeadersArray);
                            }
                        }
                        NotificationData emailNotificationData = new NotificationData();
                        String emailTemplate = null;
                        int tenantId = userStoreManager.getTenantId();
                        String firstName = null;
                        String userStoreDomain = userStoreManager.getRealmConfiguration()
                                .getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                        String domainSpecificUserName = UserCoreUtil.addDomainToName(userName, userStoreDomain);
                        String tenantDomain = IdentityTenantUtil.getTenantDomain(userStoreManager.getTenantId());
                        try {
                            firstName = Utils.getClaimFromUserStoreManager(domainSpecificUserName, tenantId,
                                    UserCoreConstants.ClaimTypeURIs.GIVEN_NAME);
                        } catch (IdentityException e2) {
                            throw new UserStoreException("Could not load user given name", e2);
                        }
                        emailNotificationData.setTagData("first-name", firstName);
                        emailNotificationData.setTagData("user-name", userName);
                        emailNotificationData.setTagData("otp-password", password);
                        emailNotificationData.setTagData("userstore-domain", userStoreDomain);
                        emailNotificationData.setTagData("tenant-domain", tenantDomain);
                        emailNotificationData.setSendTo(email);
                        Config emailConfig = null;
                        ConfigBuilder configBuilder = ConfigBuilder.getInstance();
                        try {
                            emailConfig =
                                    configBuilder.loadConfiguration(ConfigType.EMAIL, StorageType.REGISTRY, tenantId);
                        } catch (Exception e1) {
                            throw new UserStoreException(
                                    "Could not load the email template configuration for user : " + userName, e1);
                        }
                        emailTemplate = emailConfig.getProperty("otp");
                        Notification emailNotification = null;
                        try {
                            emailNotification = NotificationBuilder.createNotification(EMAIL_NOTIFICATION_TYPE, emailTemplate, emailNotificationData);
                        } catch (Exception e) {
                            throw new UserStoreException(
                                    "Could not create the email notification for template: " + emailTemplate, e);
                        }
                        NotificationSender sender = new NotificationSender();
                        for (NotificationSendingModule notificationSendingModule : notificationModules) {
                            if (IdentityMgtConfig.getInstance().isNotificationInternallyManaged()) {
                                notificationSendingModule.setNotificationData(notificationData);
                                notificationSendingModule.setNotification(emailNotification);
                                sender.sendNotification(notificationSendingModule);
                                notificationData.setNotificationSent(true);
                            }
                        }
                    } else {
                        throw new UserStoreException("No notification modules configured");
                    }
                }
                // Password expire check. Not for OTP enabled users.
                if (authenticated && config.isAuthPolicyExpirePasswordCheck() && !userOTPEnabled && (!userStoreManager.isReadOnly())) {
                    // TODO - password expire impl
                    // Refactor adduser and change password api to stamp the time
                    // Check user's expire time in the claim
                    // if expired redirect to change password
                    // else pass through
                }
                if (!authenticated && config.isAuthPolicyAccountLockOnFailure()) {
                    // reading the max allowed #of failure attempts
                    if (isUserExistInCurrentDomain) {
                        userIdentityDTO.setFailAttempts();
                        if (userIdentityDTO.getFailAttempts() >= config.getAuthPolicyMaxLoginAttempts()) {
                            log.info("User, " + userName + " has exceed the max failed login attempts. " +
                                    "User account would be locked");
                            IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext
                                    (UserCoreConstants.ErrorCode.USER_IS_LOCKED + ":"
                                            + IdentityMgtConstants.LockedReason.MAX_ATTEMTS_EXCEEDED.toString(),
                                            userIdentityDTO.getFailAttempts(), config.getAuthPolicyMaxLoginAttempts());
                            IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                            IdentityUtil.threadLocalProperties.get().put(IdentityCoreConstants.USER_ACCOUNT_STATE,
                                    UserCoreConstants.ErrorCode.USER_IS_LOCKED);
                            if (log.isDebugEnabled()) {
                                log.debug("Username :" + userName + "Exceeded the maximum login attempts. User locked, ErrorCode :" + UserCoreConstants.ErrorCode.USER_IS_LOCKED);
                            }
                            userIdentityDTO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON,
                                    IdentityMgtConstants.LockedReason.MAX_ATTEMTS_EXCEEDED.toString());
                            userIdentityDTO.setAccountLock(true);
                            userIdentityDTO.setFailAttempts(0);
                            // lock time from the config
                            int lockTime = IdentityMgtConfig.getInstance().getAuthPolicyLockingTime();
                            if (lockTime != 0) {
                                userIdentityDTO.setUnlockTime(System.currentTimeMillis() +
                                        (lockTime * 60 * 1000L));
                            }
                        } else {
                            IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants.ErrorCode.INVALID_CREDENTIAL,
                                    userIdentityDTO.getFailAttempts(), config.getAuthPolicyMaxLoginAttempts());
                            IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                            if (log.isDebugEnabled()) {
                                log.debug("Username :" + userName + "Invalid Credential, ErrorCode :" + UserCoreConstants.ErrorCode.INVALID_CREDENTIAL);
                            }
                        }
                        try {
                            module.store(userIdentityDTO, userStoreManager);
                        } catch (IdentityException e) {
                            throw new UserStoreException("Error while saving user store data for user : "
                                    + userName, e);
                        }
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("User, " + userName + " is not exists in " + domainName);
                        }
                    }
                } else {
                    // If the account was locked (e.g. due to the account verification process),
                    // then unlock the account and reset the number of failed attempts.
                    if (userIdentityDTO.isAccountLocked() || userIdentityDTO.getFailAttempts() > 0 || userIdentityDTO.getAccountLock()) {
                        userIdentityDTO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON, "");
                        userIdentityDTO.setAccountLock(false);
                        userIdentityDTO.setFailAttempts(0);
                        userIdentityDTO.setUnlockTime(0);
                        try {
                            module.store(userIdentityDTO, userStoreManager);
                        } catch (IdentityException e) {
                            throw new UserStoreException("Error while saving user store data for user : "
                                    + userName, e);
                        }
                    }
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_POST_AUTHENTICATE);
        }
    }

    /**
     * This method will set the default/random password if the password provided is
     * null. The thread local parameter EMPTY_PASSWORD_USED will be used to
     * track if the password empty in the doPostAddUser.
     * This method will filter the security question URIs from claims and put those
     * to the thread local properties.
     */
    @Override
    public boolean doPreAddUser(String userName, Object credential, String[] roleList,
                                Map<String, String> claims, String profile, UserStoreManager userStoreManager)
            throws UserStoreException {
        if (!isEnable()) {
            if (credential == null || StringUtils.isBlank(credential.toString())) {
                log.error("Identity Management listener is disabled");
                throw new UserStoreException(PASSWORD_INVALID + ASK_PASSWORD_FEATURE_IS_DISABLED);
            }
            return true;
        }
        if (log.isDebugEnabled()) {
            log.debug("Pre add user is called in IdentityMgtEventListener");
        }
        // Removing existing thread local before setting
        IdentityUtil.threadLocalProperties.get().remove(EMPTY_PASSWORD_USED);
        IdentityMgtConfig config = IdentityMgtConfig.getInstance();
        try {
            // Enforcing the password policies.
            if (credential != null &&
                    (credential instanceof StringBuffer && (credential.toString().trim().length() > 0))) {
                policyRegistry.enforcePasswordPolicies(credential.toString(), userName);
            }
        } catch (PolicyViolationException pe) {
            throw new UserStoreException(pe.getMessage(), pe);
        }
        // empty password account creation
        if (credential == null ||
                (credential instanceof StringBuffer && (credential.toString().trim().length() < 1))) {
            if (!config.isEnableTemporaryPassword()) {
                log.error("Temporary password property is disabled");
                throw new UserStoreException(ASK_PASSWORD_FEATURE_IS_DISABLED);
            }
            if (log.isDebugEnabled()) {
                log.debug("Credentials are null. 
Using a temporary password as credentials"); } // setting the thread-local to check in doPostAddUser IdentityUtil.threadLocalProperties.get().put(EMPTY_PASSWORD_USED, true); // temporary passwords will be used char[] temporaryPassword = null; temporaryPassword = UserIdentityManagementUtil.generateTemporaryPassword(); // setting the password value ((StringBuffer) credential).replace(0, temporaryPassword.length, new String(temporaryPassword)); } // Filtering security question URIs from claims and add them to the thread local dto Map<String, String> userDataMap = new HashMap<String, String>(); // TODO why challenge Q Iterator<Entry<String, String>> it = claims.entrySet().iterator(); while (it.hasNext()) { Entry<String, String> claim = it.next(); if (claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI) || claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) { userDataMap.put(claim.getKey(), claim.getValue()); it.remove(); } } UserIdentityClaimsDO identityDTO = new UserIdentityClaimsDO(userName, userDataMap); identityDTO.setTenantId(userStoreManager.getTenantId()); // adding dto to thread local to be read again from the doPostAddUser method IdentityUtil.threadLocalProperties.get().put(USER_IDENTITY_DO, identityDTO); return true; } /** * This method locks the created accounts based on the account policies or * based on the account confirmation method being used. Two account * confirmation methods are used : Temporary Password and Verification Code. * In the case of temporary password is used the temporary password will be * emailed to the user. In the case of verification code, the code will be * emailed to the user. The security questions filter ad doPreAddUser will * be persisted in this method. 
 */
    @Override
    public boolean doPostAddUser(String userName, Object credential, String[] roleList,
                                 Map<String, String> claims, String profile,
                                 UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        // Top level try and finally blocks are used to unset thread local variables
        try {
            // Guard against re-entrant invocation (the listener can fire again for
            // secondary user stores within the same thread).
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_POST_ADD_USER)) {
                IdentityUtil.threadLocalProperties.get().put(DO_POST_ADD_USER, true);
                if (log.isDebugEnabled()) {
                    log.debug("Post add user is called in IdentityMgtEventListener");
                }
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                // reading the value from the thread local (populated by doPreAddUser)
                UserIdentityClaimsDO userIdentityClaimsDO = (UserIdentityClaimsDO)
                        IdentityUtil.threadLocalProperties.get().get(USER_IDENTITY_DO);
                if (config.isEnableUserAccountVerification() &&
                        IdentityUtil.threadLocalProperties.get().containsKey(EMPTY_PASSWORD_USED)) {
                    // empty password account creation
                    String domainName = ((org.wso2.carbon.user.core.UserStoreManager) userStoreManager)
                            .getRealmConfiguration().getUserStoreProperty(
                                    UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                    if (!UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME.equals(domainName)) {
                        userName = domainName + UserCoreConstants.DOMAIN_SEPARATOR + userName;
                    }
                    // store identity data
                    // NOTE(review): userIdentityClaimsDO is dereferenced here without a null check,
                    // relying on doPreAddUser always setting USER_IDENTITY_DO — TODO confirm, since
                    // the last branch below does guard against null.
                    userIdentityClaimsDO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON, "");
                    userIdentityClaimsDO.setAccountLock(false);
                    try {
                        module.store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // roll back the partially created user
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store for user : " + userName, e);
                    }
                    // store identity metadata
                    UserRecoveryDataDO metadataDO = new UserRecoveryDataDO();
                    metadataDO.setUserName(userName).setTenantId(userStoreManager.getTenantId())
                            .setCode((String) credential);
                    // set recovery data
                    RecoveryProcessor processor = new RecoveryProcessor();
                    UserRecoveryDTO recoveryDto = new UserRecoveryDTO(userName);
                    recoveryDto.setNotification(IdentityMgtConstants.Notification.ASK_PASSWORD);
                    recoveryDto.setNotificationType("EMAIL");
                    recoveryDto.setTenantId(userStoreManager.getTenantId());
                    NotificationDataDTO notificationDto = null;
                    try {
                        notificationDto = processor.recoverWithNotification(recoveryDto);
                    } catch (IdentityException e) {
                        // roll back the partially created user
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while sending notification for user : " + userName, e);
                    }
                    // success only when the ask-password notification actually went out
                    return notificationDto != null && notificationDto.isNotificationSent();
                }
                // No account recoveries are defined, no email will be sent.
                if (config.isAuthPolicyAccountLockOnCreation()) {
                    // accounts are locked. Admin should unlock
                    userIdentityClaimsDO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON,
                            IdentityMgtConstants.LockedReason.UNVERIFIED.toString());
                    userIdentityClaimsDO.setAccountLock(true);
                    try {
                        config.getIdentityDataStore().store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // roll back the partially created user
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store data for user : " + userName, e);
                    }
                }
                // When claims available in user add request like http://wso2.org/claims/identity/accountLocked
                if (!config.isEnableUserAccountVerification() &&
                        !config.isAuthPolicyAccountLockOnCreation() && userIdentityClaimsDO != null) {
                    try {
                        if (log.isDebugEnabled()) {
                            log.debug("Storing identity-mgt claims since they are available in the addUser request");
                        }
                        module.store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // roll back the partially created user
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store data for user : " + userName, e);
                    }
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_POST_ADD_USER);
            IdentityUtil.threadLocalProperties.get().remove(EMPTY_PASSWORD_USED);
        }
    }

    /**
     * This method is used
to check pre conditions when changing the user
     * password.
     */
    @Override
    public boolean doPreUpdateCredential(String userName, Object newCredential, Object oldCredential,
                                         UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        if (log.isDebugEnabled()) {
            log.debug("Pre update credential is called in IdentityMgtEventListener");
        }
        try {
            // Guard against re-entrant invocation through secondary user stores.
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_PRE_UPDATE_CREDENTIAL)) {
                IdentityUtil.threadLocalProperties.get().put(DO_PRE_UPDATE_CREDENTIAL, true);
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                UserIdentityDataStore identityDataStore = IdentityMgtConfig.getInstance().getIdentityDataStore();
                UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, userStoreManager);
                boolean isAccountDisabled = false;
                if (identityDTO != null) {
                    isAccountDisabled = identityDTO.getIsAccountDisabled();
                } else {
                    // no identity data means we cannot decide whether the account is disabled
                    throw new UserStoreException("Cannot get the user account active status.");
                }
                if (isAccountDisabled) {
                    IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(
                            IdentityCoreConstants.USER_ACCOUNT_DISABLED_ERROR_CODE);
                    IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                    //account is already disabled and trying to update the credential without enabling it
                    log.warn("Trying to update credential of a disabled user account. This is not permitted.");
                    throw new UserStoreException("User account is disabled, can't update credential without enabling.");
                }
                try {
                    // Enforcing the password policies.
                    // NOTE(review): this checks `instanceof String` while doPreAddUser and the
                    // admin variant check `instanceof StringBuffer` — confirm which type the user
                    // core passes here; a StringBuffer credential would silently skip policy checks.
                    if (newCredential != null && (newCredential instanceof String
                            && (newCredential.toString().trim().length() > 0))) {
                        policyRegistry.enforcePasswordPolicies(newCredential.toString(), userName);
                    }
                } catch (PolicyViolationException pe) {
                    throw new UserStoreException(pe.getMessage(), pe);
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_PRE_UPDATE_CREDENTIAL);
        }
    }

    /**
     * Stamps the current time as the user's last-password-update claim after an
     * admin-initiated credential change.
     */
    @Override
    public boolean doPostUpdateCredentialByAdmin(String userName, Object credential,
                                                 UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        Map<String, String> userClaims = new HashMap<>();
        userClaims.put(IdentityMgtConstants.LAST_PASSWORD_UPDATE_TIME, Long
                .toString(System.currentTimeMillis()));
        userStoreManager.setUserClaimValues(userName, userClaims, null);
        return true;
    }

    /**
     * This method is used when the admin is updating the credentials with an
     * empty credential. A random password will be generated and will be mailed
     * to the user.
 */
    @Override
    public boolean doPreUpdateCredentialByAdmin(String userName, Object newCredential,
                                                UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        if (log.isDebugEnabled()) {
            log.debug("Pre update credential by admin is called in IdentityMgtEventListener");
        }
        // Top level try and finally blocks are used to unset thread local variables
        try {
            // Guard against re-entrant invocation through secondary user stores.
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_PRE_UPDATE_CREDENTIAL_BY_ADMIN)) {
                IdentityUtil.threadLocalProperties.get().put(DO_PRE_UPDATE_CREDENTIAL_BY_ADMIN, true);
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                UserIdentityDataStore identityDataStore = IdentityMgtConfig.getInstance().getIdentityDataStore();
                UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, userStoreManager);
                boolean isAccountDisabled = false;
                if (identityDTO != null) {
                    isAccountDisabled = identityDTO.getIsAccountDisabled();
                } else {
                    // no identity data means we cannot decide whether the account is disabled
                    throw new UserStoreException("Cannot get the user account active status.");
                }
                if (isAccountDisabled) {
                    IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(
                            IdentityCoreConstants.USER_ACCOUNT_DISABLED_ERROR_CODE);
                    IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                    //account is already disabled and trying to update the credential without enabling it
                    log.warn("Trying to update credential of a disabled user account. This is not permitted.");
                    throw new UserStoreException("User account is disabled, can't update credential without enabling.");
                }
                try {
                    // Enforcing the password policies.
                    if (newCredential != null && (newCredential instanceof StringBuffer
                            && (newCredential.toString().trim().length() > 0))) {
                        policyRegistry.enforcePasswordPolicies(newCredential.toString(), userName);
                    }
                } catch (PolicyViolationException pe) {
                    throw new UserStoreException(pe.getMessage(), pe);
                }
                // Empty credential: generate a temporary password in-place and email it to the user.
                if (newCredential == null || (newCredential instanceof StringBuffer
                        && ((StringBuffer) newCredential).toString().trim().length() < 1)) {
                    if (!config.isEnableTemporaryPassword()) {
                        log.error("Empty passwords are not allowed");
                        return false;
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Credentials are null. Using a temporary password as credentials");
                    }
                    // temporary passwords will be used
                    char[] temporaryPassword = UserIdentityManagementUtil.generateTemporaryPassword();
                    // setting the password value
                    ((StringBuffer) newCredential).replace(0, temporaryPassword.length, new String(
                            temporaryPassword));
                    UserIdentityMgtBean bean = new UserIdentityMgtBean();
                    bean.setUserId(userName);
                    bean.setConfirmationCode(newCredential.toString());
                    bean.setRecoveryType(IdentityMgtConstants.Notification.TEMPORARY_PASSWORD);
                    if (log.isDebugEnabled()) {
                        log.debug("Sending the temporary password to the user " + userName);
                    }
                    UserIdentityManagementUtil.notifyViaEmail(bean);
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Updating credentials of user " + userName + " by admin with a non-empty password");
                    }
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_PRE_UPDATE_CREDENTIAL_BY_ADMIN);
        }
    }

    /**
     * This method checks if the updating claim is an user identity data or
     * security question. Identity data and security questions are updated by
     * the identity store, therefore they will not be added to the user store.
     * Other claims are skipped to the set or update.
     */
    @Override
    public boolean doPreSetUserClaimValue(String userName, String claimURI, String claimValue, String profileName,
                                          UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        //This operation is not supported for Identity Claims
        if (StringUtils.isNotBlank(claimURI) && claimURI.contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
            throw new UserStoreException(INVALID_OPERATION + " This operation is not supported for Identity claims");
        }
        return true;
    }

    // Rejects direct reads of identity claims through the generic claim API.
    public boolean doPreGetUserClaimValue(String userName, String claim, String profileName,
                                          UserStoreManager storeManager) throws UserStoreException {
        if (!isEnable()) {
            // a simple user claim. add it to the user store
            return true;
        }
        if (StringUtils.isNotBlank(claim) && claim.contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
            throw new UserStoreException(INVALID_OPERATION + " This operation is not supported for Identity claims");
        }
        return true;
    }

    /**
     * As in the above method the user account lock claim, primary challenges
     * claim will be separately handled. Identity claims will be removed from
     * the claim set before adding claims to the user store.
     */
    @Override
    public boolean doPreSetUserClaimValues(String userName, Map<String, String> claims, String profileName,
                                           UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable()) {
            return true;
        }
        IdentityUtil.threadLocalProperties.get().remove(IdentityCoreConstants.USER_ACCOUNT_STATE);
        String accountLocked = claims.get(UserIdentityDataStore.ACCOUNT_LOCK);
        boolean isAccountLocked = false;
        //Following logic is to avoid null value being interpreted as false
        if (StringUtils.isNotEmpty(accountLocked)) {
            isAccountLocked = Boolean.parseBoolean(accountLocked);
        }
        // Top level try and finally blocks are used to unset thread local variables
        try {
            if (!IdentityUtil.threadLocalProperties.get().containsKey(DO_PRE_SET_USER_CLAIM_VALUES)) {
                IdentityUtil.threadLocalProperties.get().put(DO_PRE_SET_USER_CLAIM_VALUES, true);
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                UserIdentityDataStore identityDataStore = IdentityMgtConfig.getInstance().getIdentityDataStore();
                UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, userStoreManager);
                if (identityDTO == null) {
                    // first identity update for this user: start from an empty DO
                    identityDTO = new UserIdentityClaimsDO(userName);
                    identityDTO.setTenantId(userStoreManager.getTenantId());
                }
                // NOTE(review): boxed Boolean — if getIsAccountDisabled() can return null, the
                // unboxing in the assignments/conditions below throws NPE; confirm the DO always
                // initialises this flag.
                Boolean wasAccountDisabled = identityDTO.getIsAccountDisabled();
                String accountDisabled = claims.get(UserIdentityDataStore.ACCOUNT_DISABLED);
                boolean isAccountDisabled = false;
                if (StringUtils.isNotEmpty(accountDisabled)) {
                    isAccountDisabled = Boolean.parseBoolean(accountDisabled);
                } else {
                    // claim absent: keep the previously stored disabled state
                    isAccountDisabled = wasAccountDisabled;
                }
                // This thread local can be used to check account lock status of a user.
                if (isAccountLocked) {
                    IdentityUtil.threadLocalProperties.get().put(IdentityCoreConstants.USER_ACCOUNT_STATE,
                            UserCoreConstants.ErrorCode.USER_IS_LOCKED);
                } else if (isAccountDisabled) {
                    IdentityUtil.threadLocalProperties.get().put(IdentityCoreConstants.USER_ACCOUNT_STATE,
                            IdentityCoreConstants.USER_ACCOUNT_DISABLED_ERROR_CODE);
                } else {
                    // do nothing
                }
                //account is already disabled and trying to update the claims without enabling it
                if (wasAccountDisabled && isAccountDisabled) {
                    claims.clear();
                    log.warn("Trying to update claims of a disabled user account. This is not permitted.");
                    throw new UserStoreException("User account is disabled, can't update claims without enabling.");
                }
                // Move identity / challenge-question claims out of the generic claim map into the
                // identity DTO; only plain user-store claims remain in `claims` afterwards.
                Iterator<Entry<String, String>> it = claims.entrySet().iterator();
                while (it.hasNext()) {
                    Entry<String, String> claim = it.next();
                    if (claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI)
                            || claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
                        String key = claim.getKey();
                        String value = claim.getValue();
                        if (UserIdentityDataStore.ACCOUNT_LOCK.equals(key) && (Boolean.TRUE.toString()).
                                equalsIgnoreCase(value)) {
                            // an explicit lock request via claims is recorded as admin initiated
                            identityDTO.getUserDataMap().put(UserIdentityDataStore.ACCOUNT_LOCKED_REASON,
                                    IdentityMgtConstants.LockedReason.ADMIN_INITIATED.toString());
                        }
                        identityDTO.setUserIdentityDataClaim(key, value);
                        it.remove();
                    }
                }
                // storing the identity claims and security questions
                try {
                    identityDataStore.store(identityDTO, userStoreManager);
                    int tenantId = userStoreManager.getTenantId();
                    String domainName = ((org.wso2.carbon.user.core.UserStoreManager) userStoreManager)
                            .getRealmConfiguration().
                                    getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                    String usernameWithDomain = IdentityUtil.addDomainToName(userName, domainName);
                    //case of enabling a disabled user account
                    if (wasAccountDisabled && !isAccountDisabled
                            && IdentityMgtConfig.getInstance().isAccountEnableNotificationSending()) {
                        sendEmail(usernameWithDomain, tenantId, IdentityMgtConstants.Notification.ACCOUNT_ENABLE);
                        //case of disabling an enabled account
                    } else if (!wasAccountDisabled && isAccountDisabled
                            && IdentityMgtConfig.getInstance().isAccountDisableNotificationSending()) {
                        sendEmail(usernameWithDomain, tenantId, IdentityMgtConstants.Notification.ACCOUNT_DISABLE);
                    }
                } catch (IdentityException e) {
                    throw new UserStoreException(
                            "Error while saving user store data for user : " + userName, e);
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            IdentityUtil.threadLocalProperties.get().remove(DO_PRE_SET_USER_CLAIM_VALUES);
        }
    }

    /**
     * Deleting user from the identity database. What are the registry keys ?
*/ @Override public boolean doPostDeleteUser(String userName, UserStoreManager userStoreManager) throws UserStoreException { if (!isEnable()) { return true; } // remove from the identity store try { IdentityMgtConfig.getInstance().getIdentityDataStore() .remove(userName, userStoreManager); } catch (IdentityException e) { throw new UserStoreException("Error while removing user: " + userName + " from identity data store", e); } // deleting registry meta-data UserRegistry registry = null; try { registry = IdentityMgtServiceComponent.getRegistryService().getConfigSystemRegistry( userStoreManager.getTenantId()); String identityKeyMgtPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_KEYS + RegistryConstants.PATH_SEPARATOR + userStoreManager.getTenantId() + RegistryConstants.PATH_SEPARATOR + userName; if (registry.resourceExists(identityKeyMgtPath)) { registry.delete(identityKeyMgtPath); } } catch (RegistryException e) { log.error("Error while deleting recovery data for user : " + userName + " in tenant : " + userStoreManager.getTenantId(), e); } return true; } /** * Adding the user identity data to the claims set */ @Override public boolean doPostGetUserClaimValues(String userName, String[] claims, String profileName, Map<String, String> claimMap, UserStoreManager storeManager) throws UserStoreException { if (!isEnable()) { return true; } if (claimMap == null) { claimMap = new HashMap<String, String>(); } UserIdentityDataStore identityDataStore = IdentityMgtConfig.getInstance() .getIdentityDataStore(); // check if there are identity claims boolean containsIdentityClaims = false; for (String claim : claims) { if (claim.contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI) || claim.contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) { containsIdentityClaims = true; break; } } // if there are no identity claims, let it go if (!containsIdentityClaims) { return true; } // there is/are identity claim/s . 
load the dto UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, storeManager); // if no user identity data found, just continue if (identityDTO == null) { return true; } // data found, add the values for security questions and identity claims for (String claim : claims) { if (identityDTO.getUserDataMap().containsKey(claim)) { claimMap.put(claim, identityDTO.getUserDataMap().get(claim)); } } return true; } /** * Returning the user identity data as a claim */ @Override public boolean doPostGetUserClaimValue(String userName, String claim, List<String> claimValue, String profileName, UserStoreManager storeManager) throws UserStoreException { return true; } @Override public boolean doPostUpdateCredential(String userName, Object credential, UserStoreManager userStoreManager) throws UserStoreException { if (!isEnable()) { return true; } Map<String, String> userClaims = new HashMap<>(); userClaims.put(IdentityMgtConstants.LAST_PASSWORD_UPDATE_TIME, Long .toString(System.currentTimeMillis())); userStoreManager.setUserClaimValues(userName, userClaims, null); return true; } private void sendEmail(String userName, int tenantId, String notification) { UserRecoveryDTO dto; String tenantDomain = IdentityTenantUtil.getTenantDomain(tenantId); if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { dto = new UserRecoveryDTO(userName); } else { UserDTO userDTO = new UserDTO(UserCoreUtil.addTenantDomainToEntry(userName, tenantDomain)); userDTO.setTenantId(tenantId); dto = new UserRecoveryDTO(userDTO); } dto.setNotification(notification); dto.setNotificationType(EMAIL_NOTIFICATION_TYPE); try { IdentityMgtServiceComponent.getRecoveryProcessor().recoverWithNotification(dto); } catch (IdentityException e) { //proceed with the rest of the flow even if the email is not sent log.error("Email notification sending failed for user:" + userName + " for " + notification); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.salesforce; import java.net.URI; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Supplier; import java.util.regex.Pattern; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.SSLContextParametersAware; import org.apache.camel.TypeConverter; import org.apache.camel.component.salesforce.api.SalesforceException; import org.apache.camel.component.salesforce.api.dto.AbstractSObjectBase; import org.apache.camel.component.salesforce.api.utils.SecurityUtils; import org.apache.camel.component.salesforce.internal.OperationName; import org.apache.camel.component.salesforce.internal.PayloadFormat; import org.apache.camel.component.salesforce.internal.SalesforceSession; import org.apache.camel.component.salesforce.internal.client.DefaultRestClient; import org.apache.camel.component.salesforce.internal.client.RestClient; import org.apache.camel.component.salesforce.internal.streaming.SubscriptionHelper; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.annotations.Component; import 
org.apache.camel.support.DefaultComponent; import org.apache.camel.support.PropertyBindingSupport; import org.apache.camel.support.jsse.KeyStoreParameters; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.camel.support.service.ServiceHelper; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.StringHelper; import org.eclipse.jetty.client.HttpProxy; import org.eclipse.jetty.client.Origin; import org.eclipse.jetty.client.ProxyConfiguration; import org.eclipse.jetty.client.Socks4Proxy; import org.eclipse.jetty.client.api.Authentication; import org.eclipse.jetty.client.util.BasicAuthentication; import org.eclipse.jetty.client.util.DigestAuthentication; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.component.salesforce.SalesforceLoginConfig.DEFAULT_LOGIN_URL; /** * Represents the component that manages {@link SalesforceEndpoint}. */ @Metadata(label = "verifiers", enums = "parameters,connectivity") @Component("salesforce") public class SalesforceComponent extends DefaultComponent implements SSLContextParametersAware { public static final String HTTP_PROXY_HOST = "httpProxyHost"; public static final String HTTP_PROXY_PORT = "httpProxyPort"; public static final String HTTP_PROXY_IS_SOCKS4 = "isHttpProxySocks4"; public static final String HTTP_PROXY_IS_SECURE = "isHttpProxySecure"; public static final String HTTP_PROXY_INCLUDE = "httpProxyInclude"; public static final String HTTP_PROXY_EXCLUDE = "httpProxyExclude"; public static final String HTTP_PROXY_USERNAME = "httpProxyUsername"; public static final String HTTP_PROXY_PASSWORD = "httpProxyPassword"; public static final String HTTP_PROXY_USE_DIGEST_AUTH = "httpProxyUseDigestAuth"; public static final String HTTP_PROXY_AUTH_URI = "httpProxyAuthUri"; public static final String HTTP_PROXY_REALM = "httpProxyRealm"; public static final String HTTP_CONNECTION_TIMEOUT = 
"httpConnectionTimeout"; public static final String HTTP_IDLE_TIMEOUT = "httpIdleTimeout"; public static final String HTTP_MAX_CONTENT_LENGTH = "httpMaxContentLength"; static final int CONNECTION_TIMEOUT = 60000; static final int IDLE_TIMEOUT = 10000; static final Pattern SOBJECT_NAME_PATTERN = Pattern.compile("^.*[\\?&]sObjectName=([^&,]+).*$"); static final String APEX_CALL_PREFIX = OperationName.APEX_CALL.value() + "/"; private static final Logger LOG = LoggerFactory.getLogger(SalesforceComponent.class); @Metadata(description = "All authentication configuration in one nested bean, all properties set there can be set" + " directly on the component as well", label = "common,security") private SalesforceLoginConfig loginConfig; @Metadata(description = "URL of the Salesforce instance used after authentication, by default received from" + " Salesforce on successful authentication", label = "common,security") private String instanceUrl; // allow fine grained login as well @Metadata(description = "URL of the Salesforce instance used for authentication, by default set to " + DEFAULT_LOGIN_URL, label = "common,security", defaultValue = DEFAULT_LOGIN_URL, required = true) private String loginUrl; @Metadata(description = "OAuth Consumer Key of the connected app configured in the Salesforce instance setup." + " Typically a connected app needs to be configured but one can be provided by installing a package.", label = "common,security", required = true) private String clientId; @Metadata(description = "OAuth Consumer Secret of the connected app configured in the Salesforce instance setup.", label = "common,security", secret = true) private String clientSecret; @Metadata(description = "Refresh token already obtained in the refresh token OAuth flow. 
One needs to setup a web" + " application and configure a callback URL to receive the refresh token, or configure using the builtin" + " callback at https://login.salesforce.com/services/oauth2/success or " + " https://test.salesforce.com/services/oauth2/success and then retrive the refresh_token from the URL at the" + " end of the flow. Note that in development organizations Salesforce allows hosting the callback web " + " application at localhost.", label = "common,security", secret = true) private String refreshToken; @Metadata(description = "Username used in OAuth flow to gain access to access token. It's easy to get started with" + " password OAuth flow, but in general one should avoid it as it is deemed less secure than other flows.", label = "common,security") private String userName; @Metadata(description = "Password used in OAuth flow to gain access to access token. It's easy to get started with" + " password OAuth flow, but in general one should avoid it as it is deemed less secure than other flows." + " Make sure that you append security token to the end of the password if using one.", label = "common,security", secret = true) private String password; @Metadata(description = "KeyStore parameters to use in OAuth JWT flow. The KeyStore should contain only one entry" + " with private key and certificate. Salesforce does not verify the certificate chain, so this can easily be" + " a selfsigned certificate. Make sure that you upload the certificate to the corresponding connected app.", label = "common,security", secret = true) private KeyStoreParameters keystore; @Metadata(description = "Explicit authentication method to be used, one of USERNAME_PASSWORD, REFRESH_TOKEN or JWT." 
+ " Salesforce component can auto-determine the authentication method to use from the properties set, set this " + " property to eliminate any ambiguity.", label = "common,security", enums = "USERNAME_PASSWORD,REFRESH_TOKEN,JWT") private AuthenticationType authenticationType; @Metadata(description = "If set to true prevents the component from authenticating to Salesforce with the start of" + " the component. You would generally set this to the (default) false and authenticate early and be immediately" + " aware of any authentication issues.", defaultValue = "false", label = "common,security") private boolean lazyLogin; @Metadata(description = "Global endpoint configuration - use to set values that are common to all endpoints", label = "common,advanced") private SalesforceEndpointConfig config; @Metadata(description = "Timeout used by the HttpClient when waiting for response from the Salesforce server.", label = "common", defaultValue = "" + IDLE_TIMEOUT) private long httpClientIdleTimeout = IDLE_TIMEOUT; @Metadata(description = "Connection timeout used by the HttpClient when connecting to the Salesforce server.", label = "common", defaultValue = "" + CONNECTION_TIMEOUT) private long httpClientConnectionTimeout = CONNECTION_TIMEOUT; @Metadata(description = "Max content length of an HTTP response.", label = "common") private Integer httpMaxContentLength; @Metadata(description = "Used to set any properties that can be configured on the underlying HTTP client. 
Have a" + " look at properties of SalesforceHttpClient and the Jetty HttpClient for all available options.", label = "common,advanced") private Map<String, Object> httpClientProperties; @Metadata(description = "Used to set any properties that can be configured on the LongPollingTransport used by the" + " BayeuxClient (CometD) used by the streaming api", label = "common,advanced") private Map<String, Object> longPollingTransportProperties; @Metadata(description = "SSL parameters to use, see SSLContextParameters class for all available options.", label = "common,security") private SSLContextParameters sslContextParameters; @Metadata(description = "Enable usage of global SSL context parameters", label = "security", defaultValue = "false") private boolean useGlobalSslContextParameters; // Proxy host and port @Metadata(description = "Hostname of the HTTP proxy server to use.", label = "common,proxy") private String httpProxyHost; @Metadata(description = "Port number of the HTTP proxy server to use.", label = "common,proxy") private Integer httpProxyPort; @Metadata(description = "If set to true the configures the HTTP proxy to use as a SOCKS4 proxy.", defaultValue = "false", label = "common,proxy") private boolean httpProxySocks4; @Metadata(description = "If set to false disables the use of TLS when accessing the HTTP proxy.", defaultValue = "true", label = "common,proxy,security") private boolean httpProxySecure = true; @Metadata(description = "A list of addresses for which HTTP proxy server should be used.", label = "common,proxy") private Set<String> httpProxyIncludedAddresses; @Metadata(description = "A list of addresses for which HTTP proxy server should not be used.", label = "common,proxy") private Set<String> httpProxyExcludedAddresses; // Proxy basic authentication @Metadata(description = "Username to use to authenticate against the HTTP proxy server.", label = "common,proxy,security") private String httpProxyUsername; @Metadata(description = "Password to use to 
authenticate against the HTTP proxy server.", label = "common,proxy,security", secret = true) private String httpProxyPassword; @Metadata(description = "Used in authentication against the HTTP proxy server, needs to match the URI of the proxy" + " server in order for the httpProxyUsername and httpProxyPassword to be used for authentication.", label = "common,proxy,security") private String httpProxyAuthUri; @Metadata(description = "Realm of the proxy server, used in preemptive Basic/Digest authentication methods against" + " the HTTP proxy server.", label = "common,proxy,security") private String httpProxyRealm; @Metadata(description = "If set to true Digest authentication will be used when authenticating to the HTTP proxy," + " otherwise Basic authorization method will be used", defaultValue = "false", label = "common,proxy,security") private boolean httpProxyUseDigestAuth; @Metadata(description = "In what packages are the generated DTO classes. Typically the classes would be generated" + " using camel-salesforce-maven-plugin. Set it if using the generated DTOs to gain the benefit of using short " + " SObject names in parameters/header values. 
Multiple packages can be separated by comma.", javaType = "java.lang.String", label = "common") private String packages; // component state private SalesforceHttpClient httpClient; private SalesforceSession session; private Map<String, Class<?>> classMap; // Lazily created helper for consumer endpoints private SubscriptionHelper subscriptionHelper; public SalesforceComponent() { this(null); } public SalesforceComponent(CamelContext context) { super(context); registerExtension(SalesforceComponentVerifierExtension::new); registerExtension(SalesforceMetaDataExtension::new); } @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { // get Operation from remaining URI OperationName operationName = null; String topicName = null; String apexUrl = null; try { LOG.debug("Creating endpoint for: {}", remaining); if (remaining.startsWith(APEX_CALL_PREFIX)) { // extract APEX URL apexUrl = remaining.substring(APEX_CALL_PREFIX.length()); remaining = OperationName.APEX_CALL.value(); } operationName = OperationName.fromValue(remaining); } catch (IllegalArgumentException ex) { // if its not an operation name, treat is as topic name for consumer // endpoints topicName = remaining; } // create endpoint config if (config == null) { config = new SalesforceEndpointConfig(); } if (config.getHttpClient() == null) { // set the component's httpClient as default config.setHttpClient(httpClient); } // create a deep copy and map parameters final SalesforceEndpointConfig copy = config.copy(); setProperties(copy, parameters); // set apexUrl in endpoint config if (apexUrl != null) { copy.setApexUrl(apexUrl); } final SalesforceEndpoint endpoint = new SalesforceEndpoint(uri, this, copy, operationName, topicName); // map remaining parameters to endpoint (specifically, synchronous) setProperties(endpoint, parameters); // if operation is APEX call, map remaining parameters to query params if (operationName == OperationName.APEX_CALL 
// continuation of createEndpoint(): the APEX_CALL condition head is on the previous line
                && !parameters.isEmpty()) {
            Map<String, Object> queryParams = new HashMap<>(copy.getApexQueryParams());

            // override component params with endpoint params
            queryParams.putAll(parameters);
            parameters.clear();
            copy.setApexQueryParams(queryParams);
        }

        return endpoint;
    }

    // Scans the configured packages for AbstractSObjectBase subclasses and indexes them
    // by simple class name, so SObject names in headers/parameters can be resolved to DTOs.
    private Map<String, Class<?>> parsePackages() {
        Map<String, Class<?>> result = new HashMap<>();
        Set<Class<?>> classes = getCamelContext().adapt(ExtendedCamelContext.class).getPackageScanClassResolver()
            .findImplementations(AbstractSObjectBase.class, getPackagesAsArray());
        for (Class<?> aClass : classes) {
            result.put(aClass.getSimpleName(), aClass);
        }
        return result;
    }

    // Shared Jetty-based HTTP client; created lazily in doStart() when not injected.
    public SalesforceHttpClient getHttpClient() {
        return httpClient;
    }

    // Lifecycle: assembles loginConfig from flat component properties (unless a shared one was
    // set), creates/configures the HTTP client and session, then logs in unless lazyLogin is set.
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (loginConfig == null) {
            // no nested bean given - build one from the individual component properties
            loginConfig = new SalesforceLoginConfig();
            loginConfig.setInstanceUrl(instanceUrl);
            loginConfig.setClientId(clientId);
            loginConfig.setClientSecret(clientSecret);
            loginConfig.setKeystore(keystore);
            loginConfig.setLazyLogin(lazyLogin);
            loginConfig.setLoginUrl(loginUrl);
            loginConfig.setPassword(password);
            loginConfig.setRefreshToken(refreshToken);
            loginConfig.setType(authenticationType);
            loginConfig.setUserName(userName);

            LOG.debug("Created login configuration: {}", loginConfig);
        } else {
            LOG.debug("Using shared login configuration: {}", loginConfig);
        }

        // create a Jetty HttpClient if not already set
        if (httpClient == null) {
            // explicit SSL parameters win over global ones; otherwise use a fresh default
            final SSLContextParameters contextParameters = Optional.ofNullable(sslContextParameters)
                .orElseGet(() -> Optional.ofNullable(retrieveGlobalSslContextParameters())
                    .orElseGet(() -> new SSLContextParameters()));
            final SslContextFactory sslContextFactory = new SslContextFactory();
            sslContextFactory.setSslContext(contextParameters.createSSLContext(getCamelContext()));
            httpClient = createHttpClient(sslContextFactory);
            getConfig().setHttpClient(httpClient);
        }
        if (httpClientProperties == null) {
            httpClientProperties = new HashMap<>();
        }
        defineComponentPropertiesIn(httpClientProperties, this);
        setupHttpClient(httpClient, getCamelContext(), httpClientProperties);

        // support restarts
        if (session == null) {
            session = new SalesforceSession(getCamelContext(), httpClient, httpClient.getTimeout(), loginConfig);
        }
        // set session before calling start()
        httpClient.setSession(session);

        // start the Jetty client to initialize thread pool, etc.
        httpClient.start();

        // login at startup if lazyLogin is disabled
        if (!loginConfig.isLazyLogin()) {
            ServiceHelper.startService(session);
        }

        if (packages != null) {
            // parse the packages to create SObject name to class map
            classMap = parsePackages();
            LOG.info("Found {} generated classes in packages: {}", classMap.size(), packages);
        } else {
            // use an empty map to avoid NPEs later
            LOG.warn("Missing property packages, getSObject* operations will NOT work without property rawPayload=true");
            classMap = new HashMap<>(0);
        }

        if (subscriptionHelper != null) {
            ServiceHelper.startService(subscriptionHelper);
        }
    }

    // Lifecycle: tears down streaming, session and HTTP client (in that order; the client is
    // always stopped in the finally block even if logout fails).
    @Override
    protected void doStop() throws Exception {
        if (classMap != null) {
            classMap.clear();
        }

        try {
            if (subscriptionHelper != null) {
                // shutdown all streaming connections
                // note that this is done in the component, and not in consumer
                ServiceHelper.stopService(subscriptionHelper);
                subscriptionHelper = null;
            }
            if (session != null && session.getAccessToken() != null) {
                // logout of Salesforce
                ServiceHelper.stopService(session);
            }
        } finally {
            if (httpClient != null) {
                // shutdown http client connections
                httpClient.stop();
                // destroy http client if it was created by the component
                if (config != null && config.getHttpClient() == null) {
                    httpClient.destroy();
                }
                httpClient = null;
            }
        }
    }

    // Shared CometD subscription helper for streaming consumers, created on first use.
    public SubscriptionHelper getSubscriptionHelper() throws Exception {
        if (subscriptionHelper == null) {
            // lazily create subscription helper
            subscriptionHelper = new SubscriptionHelper(this);
        }
        return subscriptionHelper;
    }

    public AuthenticationType getAuthenticationType() {
        return
// continuation: return value of getAuthenticationType() started on the previous line
                authenticationType;
    }

    // ---------------------------------------------------------------------
    // Plain getters/setters for the component configuration properties; the
    // property semantics are documented on the corresponding @Metadata fields.
    // ---------------------------------------------------------------------

    public void setAuthenticationType(AuthenticationType authenticationType) {
        this.authenticationType = authenticationType;
    }

    public SalesforceLoginConfig getLoginConfig() {
        return loginConfig;
    }

    public void setLoginConfig(SalesforceLoginConfig loginConfig) {
        this.loginConfig = loginConfig;
    }

    public void setInstanceUrl(String instanceUrl) {
        this.instanceUrl = instanceUrl;
    }

    public String getInstanceUrl() {
        return instanceUrl;
    }

    public String getLoginUrl() {
        return loginUrl;
    }

    public void setLoginUrl(String loginUrl) {
        this.loginUrl = loginUrl;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public void setKeystore(final KeyStoreParameters keystore) {
        this.keystore = keystore;
    }

    public KeyStoreParameters getKeystore() {
        return keystore;
    }

    public String getRefreshToken() {
        return refreshToken;
    }

    public void setRefreshToken(String refreshToken) {
        this.refreshToken = refreshToken;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public boolean isLazyLogin() {
        return lazyLogin;
    }

    public void setLazyLogin(boolean lazyLogin) {
        this.lazyLogin = lazyLogin;
    }

    public SalesforceEndpointConfig getConfig() {
        return config;
    }

    public void setConfig(SalesforceEndpointConfig config) {
        this.config = config;
    }

    public Map<String, Object> getHttpClientProperties() {
        return httpClientProperties;
    }

    public void setHttpClientProperties(Map<String, Object> httpClientProperties) {
        this.httpClientProperties = httpClientProperties;
    }

    public Map<String, Object> getLongPollingTransportProperties() {
        return longPollingTransportProperties;
    }

    public void
            setLongPollingTransportProperties(Map<String, Object> longPollingTransportProperties) {
        this.longPollingTransportProperties = longPollingTransportProperties;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    @Override
    public boolean isUseGlobalSslContextParameters() {
        return this.useGlobalSslContextParameters;
    }

    @Override
    public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
        this.useGlobalSslContextParameters = useGlobalSslContextParameters;
    }

    public long getHttpClientIdleTimeout() {
        return httpClientIdleTimeout;
    }

    public void setHttpClientIdleTimeout(long httpClientIdleTimeout) {
        this.httpClientIdleTimeout = httpClientIdleTimeout;
    }

    public long getHttpClientConnectionTimeout() {
        return httpClientConnectionTimeout;
    }

    public void setHttpClientConnectionTimeout(long httpClientConnectionTimeout) {
        this.httpClientConnectionTimeout = httpClientConnectionTimeout;
    }

    public Integer getHttpMaxContentLength() {
        return httpMaxContentLength;
    }

    public void setHttpMaxContentLength(Integer httpMaxContentLength) {
        this.httpMaxContentLength = httpMaxContentLength;
    }

    public String getHttpProxyHost() {
        return httpProxyHost;
    }

    public void setHttpProxyHost(String httpProxyHost) {
        this.httpProxyHost = httpProxyHost;
    }

    public Integer getHttpProxyPort() {
        return httpProxyPort;
    }

    public void setHttpProxyPort(Integer httpProxyPort) {
        this.httpProxyPort = httpProxyPort;
    }

    public String getHttpProxyUsername() {
        return httpProxyUsername;
    }

    public void setHttpProxyUsername(String httpProxyUsername) {
        this.httpProxyUsername = httpProxyUsername;
    }

    public String getHttpProxyPassword() {
        return httpProxyPassword;
    }

    public void setHttpProxyPassword(String httpProxyPassword) {
        this.httpProxyPassword = httpProxyPassword;
    }

    public boolean isHttpProxySocks4() {
        return httpProxySocks4;
    }

    public void
// continuation of the setter whose "public void" modifier is on the previous line
            setHttpProxySocks4(boolean isHttpProxySocks4) {
        this.httpProxySocks4 = isHttpProxySocks4;
    }

    public boolean isHttpProxySecure() {
        return httpProxySecure;
    }

    public void setHttpProxySecure(boolean isHttpProxySecure) {
        this.httpProxySecure = isHttpProxySecure;
    }

    public Set<String> getHttpProxyIncludedAddresses() {
        return httpProxyIncludedAddresses;
    }

    public void setHttpProxyIncludedAddresses(Set<String> httpProxyIncludedAddresses) {
        this.httpProxyIncludedAddresses = httpProxyIncludedAddresses;
    }

    public Set<String> getHttpProxyExcludedAddresses() {
        return httpProxyExcludedAddresses;
    }

    public void setHttpProxyExcludedAddresses(Set<String> httpProxyExcludedAddresses) {
        this.httpProxyExcludedAddresses = httpProxyExcludedAddresses;
    }

    public String getHttpProxyAuthUri() {
        return httpProxyAuthUri;
    }

    public void setHttpProxyAuthUri(String httpProxyAuthUri) {
        this.httpProxyAuthUri = httpProxyAuthUri;
    }

    public String getHttpProxyRealm() {
        return httpProxyRealm;
    }

    public void setHttpProxyRealm(String httpProxyRealm) {
        this.httpProxyRealm = httpProxyRealm;
    }

    public boolean isHttpProxyUseDigestAuth() {
        return httpProxyUseDigestAuth;
    }

    public void setHttpProxyUseDigestAuth(boolean httpProxyUseDigestAuth) {
        this.httpProxyUseDigestAuth = httpProxyUseDigestAuth;
    }

    public String getPackages() {
        return packages;
    }

    public void setPackages(String packages) {
        this.packages = packages;
    }

    // Splits the comma-separated packages property; returns null when no packages were configured.
    public String[] getPackagesAsArray() {
        if (packages != null) {
            return packages.split(",");
        } else {
            return null;
        }
    }

    public SalesforceSession getSession() {
        return session;
    }

    public Map<String, Class<?>> getClassMap() {
        return classMap;
    }

    // Convenience overload: builds a REST client from the endpoint's own configuration.
    public RestClient createRestClientFor(final SalesforceEndpoint endpoint) throws SalesforceException {
        final SalesforceEndpointConfig endpointConfig = endpoint.getConfiguration();

        return createRestClientFor(endpointConfig);
    }

    // Builds a REST client sharing the component's HTTP client, session and login config.
    RestClient createRestClientFor(SalesforceEndpointConfig endpointConfig) throws SalesforceException {
        final String version = endpointConfig.getApiVersion();
        final PayloadFormat format = endpointConfig.getFormat();

        return new DefaultRestClient(httpClient, version, format, session, loginConfig);
    }

    // Builds a REST client from a copy of the component config overridden by the given properties.
    RestClient createRestClient(final Map<String, Object> properties) throws Exception {
        final SalesforceEndpointConfig modifiedConfig = Optional.ofNullable(config).map(SalesforceEndpointConfig::copy)
            .orElseGet(() -> new SalesforceEndpointConfig());
        final CamelContext camelContext = getCamelContext();

        PropertyBindingSupport.bindProperties(camelContext, modifiedConfig, properties);

        return createRestClientFor(modifiedConfig);
    }

    // Static variant used outside a component instance: builds config, login config, SSL setup,
    // HTTP client and session entirely from the given property map.
    static RestClient createRestClient(final CamelContext camelContext, final Map<String, Object> properties)
        throws Exception {

        final SalesforceEndpointConfig config = new SalesforceEndpointConfig();
        // let's work with a copy so original properties are intact
        PropertyBindingSupport.bindProperties(camelContext, config, new HashMap<>(properties));

        final SalesforceLoginConfig loginConfig = new SalesforceLoginConfig();
        // let's work with a copy so original properties are intact
        PropertyBindingSupport.bindProperties(camelContext, loginConfig, new HashMap<>(properties));

        final SSLContextParameters sslContextParameters
            = Optional.ofNullable(camelContext.getSSLContextParameters()).orElseGet(() -> new SSLContextParameters());
        // let's work with a copy so original properties are intact
        PropertyBindingSupport.bindProperties(camelContext, sslContextParameters, new HashMap<>(properties));

        final SslContextFactory sslContextFactory = new SslContextFactory();
        sslContextFactory.setSslContext(sslContextParameters.createSSLContext(camelContext));

        final SalesforceHttpClient httpClient = createHttpClient(sslContextFactory);
        setupHttpClient(httpClient, camelContext, properties);

        final SalesforceSession session = new SalesforceSession(camelContext, httpClient, httpClient.getTimeout(), loginConfig);
        httpClient.setSession(session);

        return new DefaultRestClient(httpClient, config.getApiVersion(),
// continuation: remaining constructor arguments of the DefaultRestClient created on the previous line
                config.getFormat(), session, loginConfig);
    }

    // Creates the Jetty-based HTTP client with default timeouts applied.
    static SalesforceHttpClient createHttpClient(final SslContextFactory sslContextFactory) throws Exception {
        SecurityUtils.adaptToIBMCipherNames(sslContextFactory);

        final SalesforceHttpClient httpClient = new SalesforceHttpClient(sslContextFactory);
        // default settings, use httpClientProperties to set other
        // properties
        httpClient.setConnectTimeout(CONNECTION_TIMEOUT);
        httpClient.setIdleTimeout(IDLE_TIMEOUT);

        return httpClient;
    }

    // Applies the free-form httpClientProperties map to the client: generic bean properties via
    // PropertyBindingSupport plus the well-known timeout/proxy/authentication keys handled below.
    static SalesforceHttpClient setupHttpClient(
        final SalesforceHttpClient httpClient, final CamelContext camelContext,
        final Map<String, Object> httpClientProperties)
        throws Exception {

        if (httpClientProperties == null || httpClientProperties.isEmpty()) {
            return httpClient;
        }

        // set HTTP client parameters
        final TypeConverter typeConverter = camelContext.getTypeConverter();
        PropertyBindingSupport.bindProperties(camelContext, httpClient, new HashMap<>(httpClientProperties));

        final Long httpConnectionTimeout
            = typeConverter.convertTo(Long.class, httpClientProperties.get(HTTP_CONNECTION_TIMEOUT));
        final Long httpIdleTimeout
            = typeConverter.convertTo(Long.class, httpClientProperties.get(HTTP_IDLE_TIMEOUT));
        final Integer maxContentLength
            = typeConverter.convertTo(Integer.class, httpClientProperties.get(HTTP_MAX_CONTENT_LENGTH));

        final String httpProxyHost
            = typeConverter.convertTo(String.class, httpClientProperties.get(HTTP_PROXY_HOST));
        final Integer httpProxyPort
            = typeConverter.convertTo(Integer.class, httpClientProperties.get(HTTP_PROXY_PORT));
        final boolean isHttpProxySocks4
            = typeConverter.convertTo(boolean.class, httpClientProperties.get(HTTP_PROXY_IS_SOCKS4));
        final boolean isHttpProxySecure
            = typeConverter.convertTo(boolean.class, httpClientProperties.get(HTTP_PROXY_IS_SECURE));
        @SuppressWarnings("unchecked")
        final Set<String> httpProxyIncludedAddresses = (Set<String>) httpClientProperties.get(HTTP_PROXY_INCLUDE);
        @SuppressWarnings("unchecked")
        final Set<String> httpProxyExcludedAddresses = (Set<String>) httpClientProperties.get(HTTP_PROXY_EXCLUDE);
        final String httpProxyUsername
            = typeConverter.convertTo(String.class, httpClientProperties.get(HTTP_PROXY_USERNAME));
        final String httpProxyPassword
            = typeConverter.convertTo(String.class, httpClientProperties.get(HTTP_PROXY_PASSWORD));
        final String httpProxyAuthUri
            = typeConverter.convertTo(String.class, httpClientProperties.get(HTTP_PROXY_AUTH_URI));
        final String httpProxyRealm
            = typeConverter.convertTo(String.class, httpClientProperties.get(HTTP_PROXY_REALM));
        final boolean httpProxyUseDigestAuth
            = typeConverter.convertTo(boolean.class, httpClientProperties.get(HTTP_PROXY_USE_DIGEST_AUTH));

        // set HTTP timeout settings
        if (httpIdleTimeout != null) {
            httpClient.setIdleTimeout(httpIdleTimeout);
        }
        if (httpConnectionTimeout != null) {
            httpClient.setConnectTimeout(httpConnectionTimeout);
        }
        if (maxContentLength != null) {
            httpClient.setMaxContentLength(maxContentLength);
        }

        // set HTTP proxy settings
        if (httpProxyHost != null && httpProxyPort != null) {
            Origin.Address proxyAddress = new Origin.Address(httpProxyHost, httpProxyPort);
            ProxyConfiguration.Proxy proxy;
            if (isHttpProxySocks4) {
                proxy = new Socks4Proxy(proxyAddress, isHttpProxySecure);
            } else {
                proxy = new HttpProxy(proxyAddress, isHttpProxySecure);
            }
            if (httpProxyIncludedAddresses != null && !httpProxyIncludedAddresses.isEmpty()) {
                proxy.getIncludedAddresses().addAll(httpProxyIncludedAddresses);
            }
            if (httpProxyExcludedAddresses != null && !httpProxyExcludedAddresses.isEmpty()) {
                proxy.getExcludedAddresses().addAll(httpProxyExcludedAddresses);
            }
            httpClient.getProxyConfiguration().getProxies().add(proxy);
        }

        // proxy credentials require both the auth URI and realm to be configured as well
        if (httpProxyUsername != null && httpProxyPassword != null) {
            StringHelper.notEmpty(httpProxyAuthUri, "httpProxyAuthUri");
            StringHelper.notEmpty(httpProxyRealm, "httpProxyRealm");

            final Authentication authentication;
            if (httpProxyUseDigestAuth) {
                authentication = new DigestAuthentication(
                    new URI(httpProxyAuthUri), httpProxyRealm, httpProxyUsername, httpProxyPassword);
            } else {
                authentication = new BasicAuthentication(
                    new URI(httpProxyAuthUri), httpProxyRealm, httpProxyUsername, httpProxyPassword);
            }
            httpClient.getAuthenticationStore().addAuthentication(authentication);
        }

        return httpClient;
    }

    // Copies the component-level HTTP settings into the httpClientProperties map (without
    // overriding values the user already set explicitly in that map).
    private static void defineComponentPropertiesIn(
        final Map<String, Object> httpClientProperties, final SalesforceComponent salesforce) {

        putValueIfGivenTo(httpClientProperties, HTTP_IDLE_TIMEOUT, salesforce::getHttpClientIdleTimeout);
        putValueIfGivenTo(httpClientProperties, HTTP_CONNECTION_TIMEOUT, salesforce::getHttpClientConnectionTimeout);

        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_HOST, salesforce::getHttpProxyHost);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_PORT, salesforce::getHttpProxyPort);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_INCLUDE, salesforce::getHttpProxyIncludedAddresses);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_EXCLUDE, salesforce::getHttpProxyExcludedAddresses);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_USERNAME, salesforce::getHttpProxyUsername);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_PASSWORD, salesforce::getHttpProxyPassword);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_REALM, salesforce::getHttpProxyRealm);
        putValueIfGivenTo(httpClientProperties, HTTP_PROXY_AUTH_URI, salesforce::getHttpProxyAuthUri);
        putValueIfGivenTo(httpClientProperties, HTTP_MAX_CONTENT_LENGTH, salesforce::getHttpMaxContentLength);

        if (ObjectHelper.isNotEmpty(salesforce.getHttpProxyHost())) {
            // let's not put `false` values in client properties if no proxy is
            // used
            putValueIfGivenTo(httpClientProperties, HTTP_PROXY_IS_SOCKS4, salesforce::isHttpProxySocks4);
            putValueIfGivenTo(httpClientProperties, HTTP_PROXY_IS_SECURE, salesforce::isHttpProxySecure);
            putValueIfGivenTo(httpClientProperties, HTTP_PROXY_USE_DIGEST_AUTH, salesforce::isHttpProxyUseDigestAuth);
        }
    }

    // Stores the supplied value under the key only when the value is non-empty and the key is
    // not already present (user-provided map entries take precedence).
    private static void putValueIfGivenTo(
        final Map<String, Object> properties, final String key, final Supplier<Object> valueSupplier) {
        final Object value = valueSupplier.get();
        if (ObjectHelper.isNotEmpty(value)) {
            properties.putIfAbsent(key, value);
        }
    }
}
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.xml;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.truth.Truth.assertWithMessage;
import static google.registry.util.DiffUtils.prettyPrintXmlDeepDiff;
import static org.joda.time.DateTimeZone.UTC;

import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.net.InetAddresses;
import com.google.common.net.InternetDomainName;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.joda.time.format.ISODateTimeFormat;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.XML;

/** Helper class for unit tests that need XML. */
public class XmlTestUtils {

  /**
   * Asserts that the two XML strings match.
   *
   * <p>Note that the actual XML must start with a UTF-8 standalone XML header, but the expected XML
   * has no such restriction (and typically lacks the header entirely).
   */
  public static void assertXmlEquals(String expected, String actual, String... ignoredPaths)
      throws Exception {
    assertXmlEqualsWithMessage(expected, actual, "", ignoredPaths);
  }

  /**
   * Asserts that the two XML strings match, but ignoring the XML header.
   *
   * <p>Do NOT use this for assertions about results of EPP flows, as the XML header is required per
   * the EPP spec for those. Rather, use this for raw operations on EPP XMLs, in situations where
   * the header may be absent or incorrect (e.g. because you did operations on raw EPP XML directly
   * loaded from a file without passing it through an EPP flow).
   */
  public static void assertXmlEqualsIgnoreHeader(
      String expected, String actual, String... ignoredPaths) throws Exception {
    // Goes straight to the helper, skipping the XML-declaration check performed by
    // assertXmlEqualsWithMessage.
    assertXmlEqualsWithMessageHelper(expected, actual, "", ignoredPaths);
  }

  // Like assertXmlEquals, but prefixes failure output with the given message. The actual XML
  // must start with the exact UTF-8 standalone XML declaration.
  public static void assertXmlEqualsWithMessage(
      String expected, String actual, String message, String... ignoredPaths) throws Exception {
    if (!actual.startsWith("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>")) {
      assertWithMessage("XML declaration not found at beginning:\n%s", actual).fail();
    }
    assertXmlEqualsWithMessageHelper(expected, actual, message, ignoredPaths);
  }

  // Converts both XML strings to normalized JSON-derived maps and compares those; on mismatch
  // fails with both documents and a deep diff.
  private static void assertXmlEqualsWithMessageHelper(
      String expected, String actual, String message, String... ignoredPaths) throws Exception {
    Map<String, Object> expectedMap = toComparableJson(expected, ignoredPaths);
    Map<String, Object> actualMap = toComparableJson(actual, ignoredPaths);
    if (!expectedMap.equals(actualMap)) {
      assertWithMessage(
              "%s: Expected:\n%s\n\nActual:\n%s\n\nDiff:\n%s\n\n",
              message,
              expected,
              actual,
              prettyPrintXmlDeepDiff(expectedMap, actualMap, null))
          .fail();
    }
  }

  /**
   * Map an element or attribute name using a namespace map to replace the namespace identifier
   * with the complete URI as given in the map. If the name has no namespace identifier, the default
   * namespace mapping is used. If the namespace identifier does not exist in the map, the name is
   * left unchanged.
   */
  private static String mapName(
      @Nullable String name, Map<String, String> nsMap, boolean mapDefaultNamespace) {
    if (name == null) {
      return null;
    }
    String ns;
    String simpleKey;
    List<String> components = Splitter.on(':').splitToList(name);
    // Handle names without identifiers, meaning they are in the default namespace.
    if (components.size() < 2) {
      if (!mapDefaultNamespace) {
        return name;
      }
      ns = "";
      simpleKey = name;
    // Handle names with identifiers.
    } else {
      ns = components.get(0);
      simpleKey = components.get(1);
    }
    // If the map does not contain the specified identifier (or "" for the default), don't do
    // anything.
    if (nsMap.containsKey(ns)) {
      ns = nsMap.get(ns);
    }
    return ns.isEmpty() ? simpleKey : (ns + ':' + simpleKey);
  }

  /**
   * Deeply explore the object and normalize values so that things we consider equal compare so.
   * The return value consists of two parts: the updated key and the value. The value is
   * straightforward enough: it is the rendering of the subtree to be attached at the current point.
   * The key is more complicated, because of namespaces. When an XML element specifies namespaces
   * using xmlns attributes, those namespaces apply to the element as well as all of its
   * descendants. That means that, when prefixing the element name with the full namespace path,
   * as required to do proper comparison, the element name depends on its children. When looping
   * through a JSONObject map, we can't just recursively generate the value and store it using the
   * key. We may have to update the key as well, to get the namespaces correct. A returned key of
   * null indicates that we should use the existing key. A non-null key indicates that we should
   * replace the existing key.
   *
   * @param elementName the name under which the current subtree was found, or null if the current
   *     subtree's name is nonexistent or irrelevant
   * @param obj the current subtree
   * @param path the (non-namespaced) element path used for ignoredPaths purposes
   * @param ignoredPaths the set of paths whose values should be set to IGNORED
   * @param nsMap the inherited namespace identifier-to-URI map
   * @return the key under which the rendered subtree should be stored (or null), and the rendered
   *     subtree
   */
  private static Map.Entry<String, Object> normalize(
      @Nullable String elementName,
      Object obj,
      @Nullable String path,
      Set<String> ignoredPaths,
      Map<String, String> nsMap) throws Exception {
    if (obj instanceof JSONObject) {
      JSONObject jsonObject = (JSONObject) obj;
      Map<String, Object> map = new HashMap<>();
      String[] names = JSONObject.getNames(jsonObject);
      if (names != null) {
        // Separate all elements and keys into namespace specifications, which we must process
        // first, and everything else.
        ImmutableList.Builder<String> namespacesBuilder = new ImmutableList.Builder<>();
        ImmutableList.Builder<String> othersBuilder = new ImmutableList.Builder<>();
        for (String key : names) {
          (key.startsWith("xmlns") ? namespacesBuilder : othersBuilder).add(key);
        }
        // First, handle all namespace specifications, updating our ns-to-URI map. Use a HashMap
        // rather than an ImmutableMap.Builder so that we can override existing map entries.
        HashMap<String, String> newNsMap = new HashMap<>(nsMap);
        for (String key : namespacesBuilder.build()) {
          // Parse the attribute name, of the form xmlns:nsid, and extract the namespace identifier.
          // If there's no colon, we are setting the default namespace.
          List<String> components = Splitter.on(':').splitToList(key);
          String ns = (components.size() >= 2) ? components.get(1) : "";
          newNsMap.put(ns, jsonObject.get(key).toString());
        }
        nsMap = ImmutableMap.copyOf(newNsMap);
        // Now, handle the non-namespace items, recursively transforming the map and mapping all
        // namespaces to the full URI for proper comparison.
        for (String key : othersBuilder.build()) {
          String simpleKey = Iterables.getLast(Splitter.on(':').split(key));
          String newPath = (path == null) ? simpleKey : (path + "." + simpleKey);
          String mappedKey;
          Object value;
          if (ignoredPaths.contains(newPath)) {
            mappedKey = null;
            // Set ignored fields to a value that will compare equal.
            value = "IGNORED";
          } else {
            Map.Entry<String, Object> simpleEntry =
                normalize(key, jsonObject.get(key), newPath, ignoredPaths, nsMap);
            mappedKey = simpleEntry.getKey();
            value = simpleEntry.getValue();
          }
          if (mappedKey == null) {
            // Note that this does not follow the XML rules exactly. I read somewhere that attribute
            // names, unlike element names, never use the default namespace. But after
            // JSONification, we cannot distinguish between attributes and child elements, so we
            // apply the default namespace to everything. Hopefully that will not cause a problem.
            mappedKey = key.equals("content") ? key : mapName(key, nsMap, true);
          }
          map.put(mappedKey, value);
        }
      }
      // Map the namespace of the element name of the map we are normalizing.
      elementName = mapName(elementName, nsMap, true);
      // If a node has both text content and attributes, the text content will end up under a key
      // called "content". If that's the only thing left (which will only happen if there was an
      // "xmlns:*" key that we removed), treat the node as just text and recurse.
      if (map.size() == 1 && map.containsKey("content")) {
        return new AbstractMap.SimpleEntry<>(
            elementName,
            normalize(null, jsonObject.get("content"), path, ignoredPaths, nsMap).getValue());
      }
      // The conversion to JSON converts <a/> into "" and the semantically equivalent <a></a> into
      // an empty map, so normalize that here.
      return new AbstractMap.SimpleEntry<>(elementName, map.isEmpty() ? "" : map);
    }
    if (obj instanceof JSONArray) {
      // Another problem resulting from JSONification: If the array contains elements whose names
      // are the same before URI expansion, but different after URI expansion, because they use
      // xmlns attribute that define the namespaces differently, we will screw up. Again, hopefully
      // that doesn't happen much. The reverse is also true: If the array contains names that are
      // different before URI expansion, but the same after, we may have a problem, because the
      // elements will wind up in different JSONArrays as a result of JSONification. We wave our
      // hands and just assume that the URI expansion of the first element holds for all others.
      Set<Object> set = new HashSet<>();
      String mappedKey = null;
      for (int i = 0; i < ((JSONArray) obj).length(); ++i) {
        Map.Entry<String, Object> simpleEntry =
            normalize(null, ((JSONArray) obj).get(i), path, ignoredPaths, nsMap);
        if (i == 0) {
          mappedKey = simpleEntry.getKey();
        }
        set.add(simpleEntry.getValue());
      }
      return new AbstractMap.SimpleEntry<>(mappedKey, set);
    }
    if (obj instanceof Number) {
      // Numbers are compared via their string rendering.
      return new AbstractMap.SimpleEntry<>(null, obj.toString());
    }
    if (obj instanceof Boolean) {
      // Normalize booleans to the "1"/"0" forms used by the XML samples.
      return new AbstractMap.SimpleEntry<>(null, ((Boolean) obj) ? "1" : "0");
    }
    if (obj instanceof String) {
      // Turn stringified booleans into integers. Both are acceptable as xml boolean values, but
      // we use "true" and "false" whereas the samples use "1" and "0".
      if (obj.equals("true")) {
        return new AbstractMap.SimpleEntry<>(null, "1");
      }
      if (obj.equals("false")) {
        return new AbstractMap.SimpleEntry<>(null, "0");
      }
      String string = obj.toString();
      // We use a slightly different datetime format (both legal) than the samples, so normalize
      // both into Datetime objects.
      try {
        return new AbstractMap.SimpleEntry<>(
            null, ISODateTimeFormat.dateTime().parseDateTime(string).toDateTime(UTC));
      } catch (IllegalArgumentException e) {
        // It wasn't a DateTime.
      }
      try {
        return new AbstractMap.SimpleEntry<>(
            null, ISODateTimeFormat.dateTimeNoMillis().parseDateTime(string).toDateTime(UTC));
      } catch (IllegalArgumentException e) {
        // It wasn't a DateTime.
      }
      try {
        if (!InternetDomainName.isValid(string)) {
          // It's not a domain name, but it is an InetAddress. Ergo, it's an ip address.
          return new AbstractMap.SimpleEntry<>(null, InetAddresses.forString(string));
        }
      } catch (IllegalArgumentException e) {
        // Not an ip address.
      }
      return new AbstractMap.SimpleEntry<>(null, string);
    }
    return new AbstractMap.SimpleEntry<>(null, checkNotNull(obj));
  }

  // Parses the XML into a JSONObject and runs the full normalization over it, yielding a plain
  // map suitable for deep equality comparison.
  @SuppressWarnings("unchecked")
  private static Map<String, Object> toComparableJson(
      String xml, String... ignoredPaths) throws Exception {
    return (Map<String, Object>) normalize(
        null,
        XML.toJSONObject(xml),
        null,
        ImmutableSet.copyOf(ignoredPaths),
        ImmutableMap.of()).getValue();
  }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * ReservationInfoType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.1  Built on : Oct 19, 2009 (10:59:34 EDT)
 *
 * NOTE: generated Axis2 ADB bean — do not hand-edit the logic; regenerate
 * from the WSDL instead, or changes will be lost.
 */

package com.amazon.ec2;

/**
 * ReservationInfoType bean class.
 * ADB data-bound representation of the EC2 ReservationInfoType schema element:
 * reservationId, ownerId, groupSet, instancesSet and an optional requesterId.
 */
public class ReservationInfoType implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = ReservationInfoType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1
    */

    // Returns the canonical prefix for the EC2 namespace, or a fresh unique one.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for ReservationId
     */
    protected java.lang.String localReservationId;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getReservationId() {
        return localReservationId;
    }

    /**
     * Auto generated setter method
     * @param param ReservationId
     */
    public void setReservationId(java.lang.String param) {
        this.localReservationId = param;
    }

    /**
     * field for OwnerId
     */
    protected java.lang.String localOwnerId;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getOwnerId() {
        return localOwnerId;
    }

    /**
     * Auto generated setter method
     * @param param OwnerId
     */
    public void setOwnerId(java.lang.String param) {
        this.localOwnerId = param;
    }

    /**
     * field for GroupSet
     */
    protected com.amazon.ec2.GroupSetType localGroupSet;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.GroupSetType
     */
    public com.amazon.ec2.GroupSetType getGroupSet() {
        return localGroupSet;
    }

    /**
     * Auto generated setter method
     * @param param GroupSet
     */
    public void setGroupSet(com.amazon.ec2.GroupSetType param) {
        this.localGroupSet = param;
    }

    /**
     * field for InstancesSet
     */
    protected com.amazon.ec2.RunningInstancesSetType localInstancesSet;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.RunningInstancesSetType
     */
    public com.amazon.ec2.RunningInstancesSetType getInstancesSet() {
        return localInstancesSet;
    }

    /**
     * Auto generated setter method
     * @param param InstancesSet
     */
    public void setInstancesSet(com.amazon.ec2.RunningInstancesSetType param) {
        this.localInstancesSet = param;
    }

    /**
     * field for RequesterId
     */
    protected java.lang.String localRequesterId;

    /*  This tracker boolean will be used to detect whether the user called the set method
     *  for this attribute. It will be used to determine whether to include this field
     *  in the serialized XML (requesterId is optional in the schema).
     */
    protected boolean localRequesterIdTracker = false;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getRequesterId() {
        return localRequesterId;
    }

    /**
     * Auto generated setter method
     * @param param RequesterId
     */
    public void setRequesterId(java.lang.String param) {
        if (param != null) {
            // update the setting tracker; a non-null value means "serialize this field"
            localRequesterIdTracker = true;
        } else {
            localRequesterIdTracker = false;
        }
        this.localRequesterId = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(
                reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // property not supported by this reader implementation
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OM element.
     *
     * @param parentQName qualified name for the wrapping element
     * @param factory OM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory)
        throws org.apache.axis2.databinding.ADBException {

        org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {
                public void serialize(
                    org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                    throws javax.xml.stream.XMLStreamException {
                    ReservationInfoType.this.serialize(parentQName, factory, xmlWriter);
                }
            };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            parentQName, factory, dataSource);
    }

    /** Serializes this bean without an explicit xsi:type attribute. */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException,
               org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    /**
     * Writes this bean as XML. Required fields (reservationId, ownerId, groupSet,
     * instancesSet) throw ADBException when null; requesterId is written only when
     * its tracker flag is set.
     *
     * @param serializeType when true, also emit an xsi:type attribute
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
        throws javax.xml.stream.XMLStreamException,
               org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        // Open the wrapping element, binding the namespace prefix if needed.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType) {
            java.lang.String namespacePrefix =
                registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                    namespacePrefix + ":ReservationInfoType",
                    xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                    "ReservationInfoType",
                    xmlWriter);
            }
        }

        // <reservationId> — required
        namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
        if (!namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "reservationId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "reservationId");
            }
        } else {
            xmlWriter.writeStartElement("reservationId");
        }
        if (localReservationId == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("reservationId cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localReservationId);
        }
        xmlWriter.writeEndElement();

        // <ownerId> — required
        namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
        if (!namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "ownerId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "ownerId");
            }
        } else {
            xmlWriter.writeStartElement("ownerId");
        }
        if (localOwnerId == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("ownerId cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localOwnerId);
        }
        xmlWriter.writeEndElement();

        // <groupSet> — required; delegates serialization to the nested bean.
        if (localGroupSet == null) {
            throw new org.apache.axis2.databinding.ADBException("groupSet cannot be null!!");
        }
        localGroupSet.serialize(
            new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/", "groupSet"),
            factory, xmlWriter);

        // <instancesSet> — required; delegates serialization to the nested bean.
        if (localInstancesSet == null) {
            throw new org.apache.axis2.databinding.ADBException("instancesSet cannot be null!!");
        }
        localInstancesSet.serialize(
            new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/", "instancesSet"),
            factory, xmlWriter);

        // <requesterId> — optional, written only when the setter was called with non-null.
        if (localRequesterIdTracker) {
            namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
            if (!namespace.equals("")) {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix, "requesterId", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                } else {
                    xmlWriter.writeStartElement(namespace, "requesterId");
                }
            } else {
                xmlWriter.writeStartElement("requesterId");
            }
            if (localRequesterId == null) {
                // write the nil attribute
                throw new org.apache.axis2.databinding.ADBException("requesterId cannot be null!!");
            } else {
                xmlWriter.writeCharacters(localRequesterId);
            }
            xmlWriter.writeEndElement();
        }

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace,
                                java.lang.String attName, java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (value rendered as prefix:localPart).
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname,
                                     javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" +
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e. this is the default namespace
                xmlWriter.writeCharacters(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as element text.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }
                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix, generating a unique one when the preferred
     * prefix is already bound to another URI.
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter,
                                            java.lang.String namespace)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object.
     * Builds alternating QName/value pairs for the pull-parser reader.
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2010-11-15/", "reservationId"));
        if (localReservationId != null) {
            elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localReservationId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("reservationId cannot be null!!");
        }

        elementList.add(new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2010-11-15/", "ownerId"));
        if (localOwnerId != null) {
            elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localOwnerId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("ownerId cannot be null!!");
        }

        elementList.add(new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2010-11-15/", "groupSet"));
        if (localGroupSet == null) {
            throw new org.apache.axis2.databinding.ADBException("groupSet cannot be null!!");
        }
        elementList.add(localGroupSet);

        elementList.add(new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2010-11-15/", "instancesSet"));
        if (localInstancesSet == null) {
            throw new org.apache.axis2.databinding.ADBException("instancesSet cannot be null!!");
        }
        elementList.add(localInstancesSet);

        if (localRequesterIdTracker) {
            elementList.add(new javax.xml.namespace.QName(
                "http://ec2.amazonaws.com/doc/2010-11-15/", "requesterId"));
            if (localRequesterId != null) {
                elementList.add(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequesterId));
            } else {
                throw new org.apache.axis2.databinding.ADBException("requesterId cannot be null!!");
            }
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(
            qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object.
         * Precondition:  If this object is an element, the current or next start element starts
         *                this object and any intervening reader events are ignorable.
         *                If this object is not an element, it is a complex type and the reader is
         *                at the event just after the outer start element.
         * Postcondition: If this object is an element, the reader is positioned at its end element.
         *                If this object is a complex type, the reader is positioned at the end
         *                element of its outer element.
         */
        public static ReservationInfoType parse(javax.xml.stream.XMLStreamReader reader)
            throws java.lang.Exception {
            ReservationInfoType object = new ReservationInfoType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // If an xsi:type attribute names a subtype, delegate to the ExtensionMapper.
                if (reader.getAttributeValue(
                        "http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue(
                        "http://www.w3.org/2001/XMLSchema-instance", "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type =
                            fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"ReservationInfoType".equals(type)) {
                            // find namespace for the prefix
                            java.lang.String nsUri =
                                reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (ReservationInfoType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2010-11-15/", "reservationId")
                        .equals(reader.getName())) {
                    java.lang.String content = reader.getElementText();
                    object.setReservationId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2010-11-15/", "ownerId")
                        .equals(reader.getName())) {
                    java.lang.String content = reader.getElementText();
                    object.setOwnerId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2010-11-15/", "groupSet")
                        .equals(reader.getName())) {
                    object.setGroupSet(com.amazon.ec2.GroupSetType.Factory.parse(reader));
                    reader.next();
                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2010-11-15/", "instancesSet")
                        .equals(reader.getName())) {
                    object.setInstancesSet(
                        com.amazon.ec2.RunningInstancesSetType.Factory.parse(reader));
                    reader.next();
                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2010-11-15/", "requesterId")
                        .equals(reader.getName())) {
                    java.lang.String content = reader.getElementText();
                    object.setRequesterId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                }  // End of if for expected property start element
                else {
                    // requesterId is optional — absence is not an error.
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }  // end of factory class

}
/* * Copyright (c) 2003, 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /** * * @author Sean Mullan * @author Steve Hanna * */ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.IOException; import java.security.cert.CertificateFactory; import java.security.cert.CertPath; import java.security.cert.CertPathBuilder; import java.security.cert.CertPathValidator; import java.security.cert.CertStore; import java.security.cert.CollectionCertStoreParameters; import java.security.cert.PKIXBuilderParameters; import java.security.cert.PKIXCertPathBuilderResult; import java.security.cert.PKIXCertPathValidatorResult; import java.security.cert.PKIXParameters; import java.security.cert.X509Certificate; import java.security.cert.X509CRL; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Static utility methods useful for testing certificate/certpath APIs. 
*/ public class CertUtils { private CertUtils() {} /** * Get a DER-encoded X.509 certificate from a file. * * @param certFilePath path to file containing DER-encoded certificate * @return X509Certificate * @throws IOException on error */ public static X509Certificate getCertFromFile(String certFilePath) throws IOException { X509Certificate cert = null; try { File certFile = new File(System.getProperty("test.src", "."), certFilePath); if (!certFile.canRead()) throw new IOException("File " + certFile.toString() + " is not a readable file."); FileInputStream certFileInputStream = new FileInputStream(certFile); CertificateFactory cf = CertificateFactory.getInstance("X509"); cert = (X509Certificate) cf.generateCertificate(certFileInputStream); } catch (Exception e) { e.printStackTrace(); throw new IOException("Can't construct X509Certificate: " + e.getMessage()); } return cert; } /** * Get a DER-encoded X.509 CRL from a file. * * @param crlFilePath path to file containing DER-encoded CRL * @return X509CRL * @throws IOException on error */ public static X509CRL getCRLFromFile(String crlFilePath) throws IOException { X509CRL crl = null; try { File crlFile = new File(System.getProperty("test.src", "."), crlFilePath); if (!crlFile.canRead()) throw new IOException("File " + crlFile.toString() + " is not a readable file."); FileInputStream crlFileInputStream = new FileInputStream(crlFile); CertificateFactory cf = CertificateFactory.getInstance("X509"); crl = (X509CRL) cf.generateCRL(crlFileInputStream); } catch (Exception e) { e.printStackTrace(); throw new IOException("Can't construct X509CRL: " + e.getMessage()); } return crl; } /** * Read a bunch of certs from files and create a CertPath from them. 
* * @param fileNames an array of <code>String</code>s that are file names * @throws Exception on error */ public static CertPath buildPath(String [] fileNames) throws Exception { return buildPath("", fileNames); } /** * Read a bunch of certs from files and create a CertPath from them. * * @param relPath relative path containing certs (must end in * file.separator) * @param fileNames an array of <code>String</code>s that are file names * @throws Exception on error */ public static CertPath buildPath(String relPath, String [] fileNames) throws Exception { List<X509Certificate> list = new ArrayList<X509Certificate>(); for (int i = 0; i < fileNames.length; i++) { list.add(0, getCertFromFile(relPath + fileNames[i])); } CertificateFactory cf = CertificateFactory.getInstance("X509"); return(cf.generateCertPath(list)); } /** * Read a bunch of certs from files and create a CertStore from them. * * @param fileNames an array of <code>String</code>s that are file names * @return the <code>CertStore</code> created * @throws Exception on error */ public static CertStore createStore(String [] fileNames) throws Exception { return createStore("", fileNames); } /** * Read a bunch of certs from files and create a CertStore from them. * * @param relPath relative path containing certs (must end in * file.separator) * @param fileNames an array of <code>String</code>s that are file names * @return the <code>CertStore</code> created * @throws Exception on error */ public static CertStore createStore(String relPath, String [] fileNames) throws Exception { Set<X509Certificate> certs = new HashSet<X509Certificate>(); for (int i = 0; i < fileNames.length; i++) { certs.add(getCertFromFile(relPath + fileNames[i])); } return CertStore.getInstance("Collection", new CollectionCertStoreParameters(certs)); } /** * Read a bunch of CRLs from files and create a CertStore from them. 
* * @param fileNames an array of <code>String</code>s that are file names * @return the <code>CertStore</code> created * @throws Exception on error */ public static CertStore createCRLStore(String [] fileNames) throws Exception { return createCRLStore("", fileNames); } /** * Read a bunch of CRLs from files and create a CertStore from them. * * @param relPath relative path containing CRLs (must end in file.separator) * @param fileNames an array of <code>String</code>s that are file names * @return the <code>CertStore</code> created * @throws Exception on error */ public static CertStore createCRLStore(String relPath, String [] fileNames) throws Exception { Set<X509CRL> crls = new HashSet<X509CRL>(); for (int i = 0; i < fileNames.length; i++) { crls.add(getCRLFromFile(relPath + fileNames[i])); } return CertStore.getInstance("Collection", new CollectionCertStoreParameters(crls)); } /** * Perform a PKIX path build. On failure, throw an exception. * * @param params PKIXBuilderParameters to use in validation * @throws Exception on error */ public static PKIXCertPathBuilderResult build(PKIXBuilderParameters params) throws Exception { CertPathBuilder builder = CertPathBuilder.getInstance("PKIX"); return (PKIXCertPathBuilderResult) builder.build(params); } /** * Perform a PKIX validation. On failure, throw an exception. * * @param path CertPath to validate * @param params PKIXParameters to use in validation * @throws Exception on error */ public static PKIXCertPathValidatorResult validate (CertPath path, PKIXParameters params) throws Exception { CertPathValidator validator = CertPathValidator.getInstance("PKIX"); return (PKIXCertPathValidatorResult) validator.validate(path, params); } /* * Reads the entire input stream into a byte array. 
*/ private static byte[] getTotalBytes(InputStream is) throws IOException { byte[] buffer = new byte[8192]; ByteArrayOutputStream baos = new ByteArrayOutputStream(2048); int n; baos.reset(); while ((n = is.read(buffer, 0, buffer.length)) != -1) { baos.write(buffer, 0, n); } return baos.toByteArray(); } }
/* * Copyright (c) 2014, Harald Kuhr * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.twelvemonkeys.imageio.plugins.tiff; import com.twelvemonkeys.imageio.metadata.Directory; import com.twelvemonkeys.imageio.metadata.Entry; import com.twelvemonkeys.imageio.metadata.tiff.Rational; import com.twelvemonkeys.imageio.metadata.tiff.TIFF; import com.twelvemonkeys.imageio.metadata.tiff.TIFFReader; import com.twelvemonkeys.imageio.stream.ByteArrayImageInputStream; import com.twelvemonkeys.imageio.util.ImageWriterAbstractTest; import com.twelvemonkeys.io.FastByteArrayOutputStream; import com.twelvemonkeys.io.NullOutputStream; import org.junit.Test; import org.w3c.dom.NodeList; import javax.imageio.*; import javax.imageio.event.IIOWriteProgressListener; import javax.imageio.metadata.IIOMetadata; import javax.imageio.metadata.IIOMetadataFormatImpl; import javax.imageio.metadata.IIOMetadataNode; import javax.imageio.spi.ImageWriterSpi; import javax.imageio.stream.FileCacheImageOutputStream; import javax.imageio.stream.FileImageOutputStream; import javax.imageio.stream.ImageInputStream; import javax.imageio.stream.ImageOutputStream; import java.awt.*; import java.awt.image.BufferedImage; import java.awt.image.RenderedImage; import java.io.*; import java.net.URL; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static com.twelvemonkeys.imageio.plugins.tiff.TIFFImageMetadataFormat.SUN_NATIVE_IMAGE_METADATA_FORMAT_NAME; import static com.twelvemonkeys.imageio.plugins.tiff.TIFFImageMetadataTest.createTIFFFieldNode; import static com.twelvemonkeys.imageio.util.ImageReaderAbstractTest.assertRGBEquals; import static org.junit.Assert.*; import static org.junit.Assume.assumeNotNull; import static org.mockito.Mockito.*; /** * TIFFImageWriterTest * * @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a> * @author last modified by $Author: haraldk$ * @version $Id: TIFFImageWriterTest.java,v 1.0 19.09.13 13:22 haraldk Exp$ */ public class TIFFImageWriterTest extends 
ImageWriterAbstractTest<TIFFImageWriter> {

    @Override
    protected ImageWriterSpi createProvider() {
        return new TIFFImageWriterSpi();
    }

    // Test images covering the common BufferedImage types this writer must handle.
    @Override
    protected List<? extends RenderedImage> getTestData() {
        return Arrays.asList(
                new BufferedImage(300, 200, BufferedImage.TYPE_INT_RGB),
                new BufferedImage(301, 199, BufferedImage.TYPE_INT_ARGB),
                new BufferedImage(299, 201, BufferedImage.TYPE_3BYTE_BGR),
                new BufferedImage(160, 90, BufferedImage.TYPE_4BYTE_ABGR),
                new BufferedImage(90, 160, BufferedImage.TYPE_BYTE_GRAY),
                new BufferedImage(30, 20, BufferedImage.TYPE_USHORT_GRAY),
                new BufferedImage(30, 20, BufferedImage.TYPE_BYTE_BINARY),
                new BufferedImage(30, 20, BufferedImage.TYPE_BYTE_INDEXED)
        );
    }

    // TODO: Test write bilevel stays bilevel
    // TODO: Test write indexed stays indexed

    // Writes resolution fields via the SUN native metadata format, then re-reads
    // the raw IFD to verify the custom values survived the write.
    @Test
    public void testWriteWithCustomResolutionNative() throws IOException {
        // Issue 139 Writing TIFF files with custom resolution value
        Rational resolutionValue = new Rational(1200);
        int resolutionUnitValue = TIFFBaseline.RESOLUTION_UNIT_CENTIMETER;

        RenderedImage image = getTestData(0);

        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream stream = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(stream);

            String nativeFormat = SUN_NATIVE_IMAGE_METADATA_FORMAT_NAME;
            IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromRenderedImage(image), null);
            IIOMetadataNode customMeta = new IIOMetadataNode(nativeFormat);

            IIOMetadataNode ifd = new IIOMetadataNode("TIFFIFD");
            customMeta.appendChild(ifd);

            // Resolution unit plus X/Y resolution expressed as native TIFF fields
            createTIFFFieldNode(ifd, TIFF.TAG_RESOLUTION_UNIT, TIFF.TYPE_SHORT, resolutionUnitValue);
            createTIFFFieldNode(ifd, TIFF.TAG_X_RESOLUTION, TIFF.TYPE_RATIONAL, resolutionValue);
            createTIFFFieldNode(ifd, TIFF.TAG_Y_RESOLUTION, TIFF.TYPE_RATIONAL, resolutionValue);

            metadata.mergeTree(nativeFormat, customMeta);

            writer.write(null, new IIOImage(image, null, metadata), null);
        }
        catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
        }

        assertTrue("No image data written", buffer.size() > 0);

        // Parse the written bytes with the low-level TIFFReader and check the IFD entries.
        Directory ifds = new TIFFReader().read(new ByteArrayImageInputStream(buffer.toByteArray()));

        Entry resolutionUnit = ifds.getEntryById(TIFF.TAG_RESOLUTION_UNIT);
        assertNotNull(resolutionUnit);
        assertEquals(resolutionUnitValue, ((Number) resolutionUnit.getValue()).intValue());

        Entry xResolution = ifds.getEntryById(TIFF.TAG_X_RESOLUTION);
        assertNotNull(xResolution);
        assertEquals(resolutionValue, xResolution.getValue());

        Entry yResolution = ifds.getEntryById(TIFF.TAG_Y_RESOLUTION);
        assertNotNull(yResolution);
        assertEquals(resolutionValue, yResolution.getValue());
    }

    // Writes a Software tag via the native metadata format and verifies it round-trips.
    @Test
    public void testWriteWithCustomSoftwareNative() throws IOException {
        String softwareString = "12M TIFF Test 1.0 (build $foo$)";

        RenderedImage image = getTestData(0);

        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream stream = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(stream);

            String nativeFormat = SUN_NATIVE_IMAGE_METADATA_FORMAT_NAME;
            IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromRenderedImage(image), null);
            IIOMetadataNode customMeta = new IIOMetadataNode(nativeFormat);

            IIOMetadataNode ifd = new IIOMetadataNode("TIFFIFD");
            customMeta.appendChild(ifd);

            createTIFFFieldNode(ifd, TIFF.TAG_SOFTWARE, TIFF.TYPE_ASCII, softwareString);

            metadata.mergeTree(nativeFormat, customMeta);

            writer.write(null, new IIOImage(image, null, metadata), null);
        }
        catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
        }

        assertTrue("No image data written", buffer.size() > 0);

        Directory ifds = new TIFFReader().read(new ByteArrayImageInputStream(buffer.toByteArray()));

        Entry software = ifds.getEntryById(TIFF.TAG_SOFTWARE);
        assertNotNull(software);
        assertEquals(softwareString, software.getValueAsString());
    }

    @Test
    public void testWriteWithCustomResolutionStandard() throws IOException { // Issue 139
// Writing TIFF files with custom resolution value
        double resolutionValue = 300 / 25.4; // 300 dpi, 1 inch = 2.54 cm or 25.4 mm
        int resolutionUnitValue = TIFFBaseline.RESOLUTION_UNIT_CENTIMETER;
        // Expected on-disk value after the writer scales the double to a Rational.
        Rational expectedResolutionValue = new Rational(Math.round(resolutionValue * 10 * TIFFImageMetadata.RATIONAL_SCALE_FACTOR), TIFFImageMetadata.RATIONAL_SCALE_FACTOR);

        RenderedImage image = getTestData(0);

        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream stream = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(stream);

            // Resolution supplied via the standard (plug-in neutral) metadata format.
            String standardFormat = IIOMetadataFormatImpl.standardMetadataFormatName;
            IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromRenderedImage(image), null);
            IIOMetadataNode customMeta = new IIOMetadataNode(standardFormat);

            IIOMetadataNode dimension = new IIOMetadataNode("Dimension");
            customMeta.appendChild(dimension);

            IIOMetadataNode xSize = new IIOMetadataNode("HorizontalPixelSize");
            dimension.appendChild(xSize);
            xSize.setAttribute("value", String.valueOf(resolutionValue));

            IIOMetadataNode ySize = new IIOMetadataNode("VerticalPixelSize");
            dimension.appendChild(ySize);
            ySize.setAttribute("value", String.valueOf(resolutionValue));

            metadata.mergeTree(standardFormat, customMeta);

            writer.write(null, new IIOImage(image, null, metadata), null);
        }
        catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
        }

        assertTrue("No image data written", buffer.size() > 0);

        Directory ifds = new TIFFReader().read(new ByteArrayImageInputStream(buffer.toByteArray()));

        Entry resolutionUnit = ifds.getEntryById(TIFF.TAG_RESOLUTION_UNIT);
        assertNotNull(resolutionUnit);
        assertEquals(resolutionUnitValue, ((Number) resolutionUnit.getValue()).intValue());

        Entry xResolution = ifds.getEntryById(TIFF.TAG_X_RESOLUTION);
        assertNotNull(xResolution);
        assertEquals(expectedResolutionValue, xResolution.getValue());

        Entry yResolution = ifds.getEntryById(TIFF.TAG_Y_RESOLUTION);
        assertNotNull(yResolution);
        assertEquals(expectedResolutionValue, yResolution.getValue());
    }

    // Software tag supplied as a standard-format Text/TextEntry node.
    @Test
    public void testWriteWithCustomSoftwareStandard() throws IOException {
        String softwareString = "12M TIFF Test 1.0 (build $foo$)";

        RenderedImage image = getTestData(0);

        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream stream = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(stream);

            String standardFormat = IIOMetadataFormatImpl.standardMetadataFormatName;
            IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromRenderedImage(image), null);
            IIOMetadataNode customMeta = new IIOMetadataNode(standardFormat);

            IIOMetadataNode dimension = new IIOMetadataNode("Text");
            customMeta.appendChild(dimension);

            IIOMetadataNode textEntry = new IIOMetadataNode("TextEntry");
            dimension.appendChild(textEntry);
            textEntry.setAttribute("keyword", "Software");
            textEntry.setAttribute("value", softwareString);

            metadata.mergeTree(standardFormat, customMeta);

            writer.write(null, new IIOImage(image, null, metadata), null);
        }
        catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
        }

        assertTrue("No image data written", buffer.size() > 0);

        Directory ifds = new TIFFReader().read(new ByteArrayImageInputStream(buffer.toByteArray()));

        Entry software = ifds.getEntryById(TIFF.TAG_SOFTWARE);
        assertNotNull(software);
        assertEquals(softwareString, software.getValueAsString());
    }

    // CCITT T.6 on an RGB image is an incompatible combination; per the
    // @Test(expected=...) annotation the write must throw IllegalArgumentException.
    @Test(expected = IllegalArgumentException.class)
    public void testWriteIncompatibleCompression() throws IOException {
        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(output);

            try {
                ImageWriteParam param = writer.getDefaultWriteParam();
                param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                param.setCompressionType("CCITT T.6");
                writer.write(null, new IIOImage(new BufferedImage(8,
8, BufferedImage.TYPE_INT_RGB), null, null), param);
                fail();
            }
            catch (IOException e) {
                fail(e.getMessage());
            }
        }
    }

    @Test
    public void testWriterCanWriteSequence() throws IOException {
        ImageWriter writer = createWriter();
        assertTrue("Writer should support sequence writing", writer.canWriteSequence());
    }

    // writeToSequence before prepareWriteSequence must throw IllegalStateException.
    @Test(expected = IllegalStateException.class)
    public void testWriteSequenceWithoutPrepare() throws IOException {
        ImageWriter writer = createWriter();
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(output);
            writer.writeToSequence(new IIOImage(new BufferedImage(10, 10, BufferedImage.TYPE_3BYTE_BGR), null, null), null);
        }
    }

    // endWriteSequence before prepareWriteSequence must throw IllegalStateException.
    @Test(expected = IllegalStateException.class)
    public void testEndSequenceWithoutPrepare() throws IOException {
        ImageWriter writer = createWriter();
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(output);
            writer.endWriteSequence();
        }
    }

    // Helper: writes one image per compression type as a multi-page sequence to the
    // given output stream type, then re-reads and compares every page to the source.
    private void assertWriteSequence(Class<? extends ImageOutputStream> iosClass, String... compression) throws IOException {
        BufferedImage image = new BufferedImage(13, 13, BufferedImage.TYPE_BYTE_GRAY);
        Graphics2D g2d = image.createGraphics();
        try {
            g2d.setColor(Color.WHITE);
            g2d.fillRect(image.getWidth() / 4, image.getHeight() / 4, image.getWidth() / 2, image.getHeight() / 2);
        }
        finally {
            g2d.dispose();
        }

        // FileImageOutputStream writes directly to a temp file; otherwise buffer in memory.
        boolean isFileDirect = iosClass == FileImageOutputStream.class;
        Object destination = isFileDirect ? File.createTempFile("temp-", ".tif") : new ByteArrayOutputStream(1024);

        ImageWriter writer = createWriter();
        try (ImageOutputStream output = isFileDirect ?
new FileImageOutputStream((File) destination) : new FileCacheImageOutputStream((OutputStream) destination, ImageIO.getCacheDirectory())) {
            writer.setOutput(output);

            ImageWriteParam params = writer.getDefaultWriteParam();
            params.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);

            try {
                writer.prepareWriteSequence(null);
                for (String compressionType : compression) {
                    params.setCompressionType(compressionType);
                    writer.writeToSequence(new IIOImage(image, null, null), params);
                }
                writer.endWriteSequence();
            }
            catch (IOException e) {
                fail(e.getMessage());
            }
        }

        try (ImageInputStream input = ImageIO.createImageInputStream(isFileDirect ? destination : new ByteArrayInputStream(((ByteArrayOutputStream) destination).toByteArray()))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            assertEquals("wrong image count", compression.length, reader.getNumImages(true));

            for (int i = 0; i < reader.getNumImages(true); i++) {
                assertImageEquals("image " + i + " differs", image, reader.read(i), 5); // Allow room for JPEG compression
            }
        }
    }

    @Test
    public void testWriteSequenceFileImageOutputStreamUncompressed() throws IOException {
        assertWriteSequence(FileImageOutputStream.class, "None", "None");
    }

    @Test
    public void testWriteSequenceFileImageOutputCompressed() throws IOException {
        assertWriteSequence(FileImageOutputStream.class, "LZW", "Deflate");
    }

    @Test
    public void testWriteSequenceFileImageOutputStreamUncompressedCompressed() throws IOException {
        assertWriteSequence(FileImageOutputStream.class, "None", "LZW", "None");
    }

    @Test
    public void testWriteSequenceFileImageOutputStreamCompressedUncompressed() throws IOException {
        assertWriteSequence(FileImageOutputStream.class, "Deflate", "None", "Deflate");
    }

    @Test
    public void testWriteSequenceFileCacheImageOutputStreamUncompressed() throws IOException {
        assertWriteSequence(FileCacheImageOutputStream.class, "None", "None");
    }

    @Test
    public void testWriteSequenceFileCacheImageOutputStreamCompressed() throws IOException {
assertWriteSequence(FileCacheImageOutputStream.class, "Deflate", "LZW");
    }

    @Test
    public void testWriteSequenceFileCacheImageOutputStreamCompressedUncompressed() throws IOException {
        assertWriteSequence(FileCacheImageOutputStream.class, "LZW", "None", "LZW");
    }

    @Test
    public void testWriteSequenceFileCacheImageOutputStreamUncompressedCompressed() throws IOException {
        assertWriteSequence(FileCacheImageOutputStream.class, "None", "Deflate", "None");
    }

    // Writes a multi-page TIFF mixing one image per compression type, then re-reads
    // all pages and compares them to the originals.
    @Test
    public void testWriteSequence() throws IOException {
        BufferedImage[] images = new BufferedImage[] {
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB),
                new BufferedImage(110, 100, BufferedImage.TYPE_3BYTE_BGR),
                new BufferedImage(120, 100, BufferedImage.TYPE_INT_RGB),
                new BufferedImage(140, 100, BufferedImage.TYPE_INT_ARGB),
                new BufferedImage(130, 100, BufferedImage.TYPE_BYTE_GRAY),
                new BufferedImage(150, 100, BufferedImage.TYPE_BYTE_BINARY),
                new BufferedImage(160, 100, BufferedImage.TYPE_BYTE_BINARY)
        };

        Color[] colors = new Color[] {Color.RED, Color.GREEN, Color.BLUE, Color.ORANGE, Color.PINK, Color.WHITE, Color.GRAY};

        // Fill each page with a distinct solid color so pages can be told apart.
        for (int i = 0; i < images.length; i++) {
            BufferedImage image = images[i];
            Graphics2D g2d = image.createGraphics();
            try {
                g2d.setColor(colors[i]);
                g2d.fillRect(0, 0, image.getWidth(), image.getHeight());
            }
            finally {
                g2d.dispose();
            }
        }

        ImageWriter writer = createWriter();

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            writer.setOutput(output);

            ImageWriteParam params = writer.getDefaultWriteParam();
            params.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);

            try {
                writer.prepareWriteSequence(null);

                params.setCompressionType("LZW");
                writer.writeToSequence(new IIOImage(images[0], null, null), params);

                params.setCompressionType("None");
                writer.writeToSequence(new IIOImage(images[1], null, null), params);

                params.setCompressionType("JPEG");
                writer.writeToSequence(new IIOImage(images[2], null, null), params);

                params.setCompressionType("PackBits");
                writer.writeToSequence(new IIOImage(images[3], null, null), params);

                params.setCompressionType("Deflate");
                writer.writeToSequence(new IIOImage(images[4], null, null), params);

                params.setCompressionType("CCITT T.4");
                writer.writeToSequence(new IIOImage(images[5], null, null), params);

                params.setCompressionType("CCITT T.6");
                writer.writeToSequence(new IIOImage(images[6], null, null), params);

                writer.endWriteSequence();
            }
            catch (IOException e) {
                fail(e.getMessage());
            }
        }

        try (ImageInputStream input = ImageIO.createImageInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            assertEquals("wrong image count", images.length, reader.getNumImages(true));

            for (int i = 0; i < reader.getNumImages(true); i++) {
                assertImageEquals("image " + i + " differs", images[i], reader.read(i), 5); // Allow room for JPEG compression
            }
        }
    }

    // Verifies progress listener callbacks (started/progress/complete) carry the
    // correct sequence index for each page, including the JPEG-delegated page.
    @Test
    public void testWriteSequenceProgress() throws IOException {
        BufferedImage[] images = new BufferedImage[] {
                new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB),
                new BufferedImage(110, 100, BufferedImage.TYPE_INT_RGB),
                new BufferedImage(120, 100, BufferedImage.TYPE_INT_RGB)
        };

        ImageWriter writer = createWriter();
        IIOWriteProgressListener progress = mock(IIOWriteProgressListener.class, "progress");
        writer.addIIOWriteProgressListener(progress);

        try (ImageOutputStream output = ImageIO.createImageOutputStream(new NullOutputStream())) {
            writer.setOutput(output);

            try {
                writer.prepareWriteSequence(null);

                for (int i = 0; i < images.length; i++) {
                    reset(progress);

                    ImageWriteParam param = writer.getDefaultWriteParam();
                    if (i == images.length - 1) {
                        // Make sure that the JPEG delegation outputs the correct indexes
                        param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                        param.setCompressionType("JPEG");
                    }

                    writer.writeToSequence(new IIOImage(images[i], null, null), param);

                    verify(progress, times(1)).imageStarted(writer, i);
                    verify(progress, atLeastOnce()).imageProgress(eq(writer), anyFloat());
                    verify(progress, times(1)).imageComplete(writer);
                }

                writer.endWriteSequence();
            }
            catch (IOException e) {
                fail(e.getMessage());
            }
        }
    }

    // A plain gray image must not get an ICC profile tag by default.
    @Test
    public void testWriteGrayNoProfile() throws IOException {
        ImageWriter writer = createWriter();
        FastByteArrayOutputStream bytes = new FastByteArrayOutputStream(512);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(bytes)) {
            writer.setOutput(output);
            writer.write(new BufferedImage(10, 10, BufferedImage.TYPE_BYTE_GRAY));
        }

        try (ImageInputStream input = ImageIO.createImageInputStream(bytes.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            TIFFImageMetadata metadata = (TIFFImageMetadata) reader.getImageMetadata(0);
            Directory ifd = metadata.getIFD();
            assertNull("Unexpected ICC profile for default gray", ifd.getEntryById(TIFF.TAG_ICC_PROFILE));
        }
    }

    // Smoke test: an explicit JPEG compression quality must not break the write.
    @Test
    public void testWriteParamJPEGQuality() throws IOException {
        ImageWriter writer = createWriter();
        try (ImageOutputStream output = ImageIO.createImageOutputStream(new NullOutputStream())) {
            writer.setOutput(output);

            try {
                ImageWriteParam param = writer.getDefaultWriteParam();
                // Make sure that the JPEG delegation outputs the correct indexes
                param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                param.setCompressionType("JPEG");
                param.setCompressionQuality(.1f);

                writer.write(null, new IIOImage(new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB), null, null), param);

                // In a perfect world, we should verify that the parameter was passed to the JPEG delegate...
}
            catch (IOException e) {
                fail(e.getMessage());
            }
        }
    }

    // Round-trip: read an LZW bilevel TIFF, write it back with the compression
    // copied from its metadata, re-read and verify pixels + key metadata.
    @Test
    public void testReadWriteRead1BitLZW() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/a33.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, using same compression (copied from metadata)
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);
            writer.write(original);
            writer.dispose();
        }

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            // Compare at most the first 300 rows, pixel-exact.
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals(String.format("Pixel differ: @%d,%d", x, y), orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            IIOMetadataNode compression = (IIOMetadataNode) tree.getElementsByTagName("CompressionTypeName").item(0);
            assertEquals("LZW", compression.getAttribute("value"));

            // Software tag is copied from the source file, so still "IrfanView".
            boolean softwareFound = false;
            NodeList textEntries = tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertEquals("IrfanView", textEntry.getAttribute("value"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Same round-trip, but forcing Deflate compression on the re-write.
    @Test
    public void testReadWriteRead1BitDeflate() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/a33.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, using deflate compression
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);

            ImageWriteParam param = writer.getDefaultWriteParam();
            param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            param.setCompressionType("Deflate");
            writer.write(null, original, param);
            writer.dispose();
        }

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals("Pixel differ: ", orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            IIOMetadataNode compression = (IIOMetadataNode) tree.getElementsByTagName("CompressionTypeName").item(0);
            assertEquals("Deflate", compression.getAttribute("value"));

            boolean softwareFound = false;
            NodeList textEntries = tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertEquals("IrfanView", textEntry.getAttribute("value"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Same round-trip, but forcing no compression on the re-write.
    @Test
    public void testReadWriteRead1BitNone() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/a33.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, no compression
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);

            ImageWriteParam param = writer.getDefaultWriteParam();
            param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            param.setCompressionType("None");
            writer.write(null, original, param);
            writer.dispose();
        }

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals("Pixel differ: ", orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            NodeList compressions = tree.getElementsByTagName("CompressionTypeName");
            IIOMetadataNode compression = (IIOMetadataNode) compressions.item(0);
            assertEquals("None", compression.getAttribute("value"));

            boolean softwareFound = false;
            NodeList textEntries =
tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertEquals("IrfanView", textEntry.getAttribute("value"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Round-trip a 24-bit LZW TIFF, keeping the compression copied from metadata.
    @Test
    public void testReadWriteRead24BitLZW() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/quad-lzw.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, using same compression (copied from metadata)
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);
            writer.write(original);
            writer.dispose();
        }

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals("Pixel differ: ", orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            IIOMetadataNode compression = (IIOMetadataNode) tree.getElementsByTagName("CompressionTypeName").item(0);
            assertEquals("LZW", compression.getAttribute("value"));

            // Here the writer itself stamps the Software tag, unlike the 1-bit tests.
            boolean softwareFound = false;
            NodeList textEntries = tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertTrue(textEntry.getAttribute("value").startsWith("TwelveMonkeys ImageIO TIFF"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Same 24-bit round-trip, but forcing Deflate compression on the re-write.
    @Test
    public void testReadWriteRead24BitDeflate() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/quad-lzw.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, using same compression (copied from metadata)
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);

            ImageWriteParam param = writer.getDefaultWriteParam();
            param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            param.setCompressionType("Deflate");
            writer.write(null, original, param);
            writer.dispose();
        }

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals("Pixel differ: ", orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            IIOMetadataNode compression = (IIOMetadataNode) tree.getElementsByTagName("CompressionTypeName").item(0);
            assertEquals("Deflate", compression.getAttribute("value"));

            boolean softwareFound = false;
            NodeList textEntries = tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertTrue(textEntry.getAttribute("value").startsWith("TwelveMonkeys ImageIO TIFF"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Same 24-bit round-trip, but forcing no compression on the re-write.
    @Test
    public void testReadWriteRead24BitNone() throws IOException {
        // Read original LZW compressed TIFF
        IIOImage original;
        try (ImageInputStream input = ImageIO.createImageInputStream(getClassLoaderResource("/tiff/quad-lzw.tif"))) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);
            original = reader.readAll(0, null);
            reader.dispose();
        }

        assumeNotNull(original);

        // Write it back, using same compression (copied from metadata)
        FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(32768);
        try (ImageOutputStream output = ImageIO.createImageOutputStream(buffer)) {
            ImageWriter writer = createWriter();
            writer.setOutput(output);

            ImageWriteParam param = writer.getDefaultWriteParam();
            param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            param.setCompressionType("None");
            writer.write(null, original, param);
            writer.dispose();
        }

//        Path tempFile = Files.createTempFile("test-", ".tif");
//        Files.write(tempFile, buffer.toByteArray());
//        System.out.println("open " + tempFile.toAbsolutePath());

        // Try re-reading the same TIFF
        try (ImageInputStream input = ImageIO.createImageInputStream(buffer.createInputStream())) {
            ImageReader reader = ImageIO.getImageReaders(input).next();
            reader.setInput(input);

            BufferedImage image = reader.read(0);
            BufferedImage orig = (BufferedImage) original.getRenderedImage();
            int maxH = Math.min(300, image.getHeight());
            for (int y = 0; y < maxH; y++) {
                for (int x = 0; x < image.getWidth(); x++) {
                    assertRGBEquals("Pixel differ: ", orig.getRGB(x, y), image.getRGB(x, y), 0);
                }
            }

            IIOMetadata metadata = reader.getImageMetadata(0);
            IIOMetadataNode tree = (IIOMetadataNode) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            IIOMetadataNode compression = (IIOMetadataNode) tree.getElementsByTagName("CompressionTypeName").item(0);
            assertEquals("None", compression.getAttribute("value"));

            boolean softwareFound = false;
            NodeList textEntries = tree.getElementsByTagName("TextEntry");
            for (int i = 0; i < textEntries.getLength(); i++) {
                IIOMetadataNode textEntry = (IIOMetadataNode) textEntries.item(i);
                if ("Software".equals(textEntry.getAttribute("keyword"))) {
                    softwareFound = true;
                    assertTrue(textEntry.getAttribute("value").startsWith("TwelveMonkeys ImageIO TIFF"));
                }
            }

            assertTrue("Software metadata not found", softwareFound);
        }
    }

    // Writes a getSubimage() crop (non-zero raster origin) for several sample files
    // and verifies the stored image matches the crop exactly.
    @Test
    public void testWriteCropped() throws IOException {
        List<URL> testData = Arrays.asList(
                getClassLoaderResource("/tiff/quad-lzw.tif"),
                getClassLoaderResource("/tiff/grayscale-alpha.tiff"),
                getClassLoaderResource("/tiff/ccitt/group3_1d.tif"),
                getClassLoaderResource("/tiff/depth/flower-palette-02.tif"),
                getClassLoaderResource("/tiff/depth/flower-palette-04.tif"),
                getClassLoaderResource("/tiff/depth/flower-minisblack-16.tif"),
                getClassLoaderResource("/tiff/depth/flower-minisblack-32.tif")
        );

        for (URL resource : testData) {
            // Read it
            BufferedImage original = ImageIO.read(resource);

            // Crop it
            BufferedImage subimage = original.getSubimage(original.getWidth() / 4, original.getHeight() / 4, original.getWidth() / 2, original.getHeight() / 2);

            // Store cropped
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (ImageOutputStream output = ImageIO.createImageOutputStream(bytes)) {
                ImageWriter imageWriter = createWriter();
                imageWriter.setOutput(output);
                imageWriter.write(subimage);
            }

            // Re-read cropped
            BufferedImage cropped = ImageIO.read(new
ByteArrayImageInputStream(bytes.toByteArray())); // Compare assertImageEquals(String.format("Cropped output differs: %s", resource.getFile()), subimage, cropped, 0); } } private void assertImageEquals(final String message, final BufferedImage expected, final BufferedImage actual, final int tolerance) { assertNotNull(message, expected); assertNotNull(message, actual); assertEquals(message + ", widths differ", expected.getWidth(), actual.getWidth()); assertEquals(message + ", heights differ", expected.getHeight(), actual.getHeight()); for (int y = 0; y < expected.getHeight(); y++) { for (int x = 0; x < expected.getWidth(); x++) { assertRGBEquals(String.format("%s, ARGB differs at (%s,%s)", message, x, y), expected.getRGB(x, y), actual.getRGB(x, y), tolerance); } } } @Test public void testWriteStreamMetadataDefaultMM() throws IOException { ImageWriter writer = createWriter(); ByteArrayOutputStream output = new ByteArrayOutputStream(); try (ImageOutputStream stream = ImageIO.createImageOutputStream(output)) { stream.setByteOrder(ByteOrder.BIG_ENDIAN); // Should pass through writer.setOutput(stream); writer.write(null, new IIOImage(getTestData(0), null, null), null); } byte[] bytes = output.toByteArray(); assertArrayEquals(new byte[] {'M', 'M', 0, 42}, Arrays.copyOf(bytes, 4)); } @Test public void testWriteStreamMetadataDefaultII() throws IOException { ImageWriter writer = createWriter(); ByteArrayOutputStream output = new ByteArrayOutputStream(); try (ImageOutputStream stream = ImageIO.createImageOutputStream(output)) { stream.setByteOrder(ByteOrder.LITTLE_ENDIAN); // Should pass through writer.setOutput(stream); writer.write(null, new IIOImage(getTestData(0), null, null), null); } byte[] bytes = output.toByteArray(); assertArrayEquals(new byte[] {'I', 'I', 42, 0}, Arrays.copyOf(bytes, 4)); } @Test public void testWriteStreamMetadataMM() throws IOException { ImageWriter writer = createWriter(); ByteArrayOutputStream output = new ByteArrayOutputStream(); try 
(ImageOutputStream stream = ImageIO.createImageOutputStream(output)) { stream.setByteOrder(ByteOrder.LITTLE_ENDIAN); // Should be overridden by stream metadata writer.setOutput(stream); writer.write(new TIFFStreamMetadata(ByteOrder.BIG_ENDIAN), new IIOImage(getTestData(0), null, null), null); } byte[] bytes = output.toByteArray(); assertArrayEquals(new byte[] {'M', 'M', 0, 42}, Arrays.copyOf(bytes, 4)); } @Test public void testWriteStreamMetadataII() throws IOException { ImageWriter writer = createWriter(); ByteArrayOutputStream output = new ByteArrayOutputStream(); try (ImageOutputStream stream = ImageIO.createImageOutputStream(output)) { stream.setByteOrder(ByteOrder.BIG_ENDIAN); // Should be overridden by stream metadata writer.setOutput(stream); writer.write(new TIFFStreamMetadata(ByteOrder.LITTLE_ENDIAN), new IIOImage(getTestData(0), null, null), null); } byte[] bytes = output.toByteArray(); assertArrayEquals(new byte[] {'I', 'I', 42, 0}, Arrays.copyOf(bytes, 4)); } @Test public void testMergeTreeARGB() throws IOException { ImageWriter writer = createWriter(); ImageWriteParam writeParam = writer.getDefaultWriteParam(); writeParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); writeParam.setCompressionType("LZW"); IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromBufferedImageType(BufferedImage.TYPE_4BYTE_ABGR), writeParam); IIOMetadataNode tiffTree = (IIOMetadataNode) metadata.getAsTree(metadata.getNativeMetadataFormatName()); metadata.setFromTree(metadata.getNativeMetadataFormatName(), tiffTree); } @Test public void testMergeTreeGray() throws IOException { ImageWriter writer = createWriter(); ImageWriteParam writeParam = writer.getDefaultWriteParam(); writeParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); writeParam.setCompressionType("LZW"); IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromBufferedImageType(BufferedImage.TYPE_BYTE_GRAY), writeParam); IIOMetadataNode tiffTree = 
(IIOMetadataNode) metadata.getAsTree(metadata.getNativeMetadataFormatName()); metadata.setFromTree(metadata.getNativeMetadataFormatName(), tiffTree); } @Test public void testMergeTreeBW() throws IOException { ImageWriter writer = createWriter(); ImageWriteParam writeParam = writer.getDefaultWriteParam(); writeParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); writeParam.setCompressionType("CCITT T.6"); IIOMetadata metadata = writer.getDefaultImageMetadata(ImageTypeSpecifier.createFromBufferedImageType(BufferedImage.TYPE_BYTE_BINARY), writeParam); IIOMetadataNode tiffTree = (IIOMetadataNode) metadata.getAsTree(metadata.getNativeMetadataFormatName()); metadata.setFromTree(metadata.getNativeMetadataFormatName(), tiffTree); } @Test public void testRewrite() throws IOException { ImageWriter writer = createWriter(); ImageReader reader = ImageIO.getImageReader(writer); List<URL> testData = Arrays.asList( getClassLoaderResource("/tiff/pixtiff/17-tiff-binary-ccitt-group3.tif"), getClassLoaderResource("/tiff/pixtiff/36-tiff-8-bit-gray-jpeg.tif"), getClassLoaderResource("/tiff/pixtiff/51-tiff-24-bit-color-jpeg.tif"), getClassLoaderResource("/tiff/pixtiff/58-plexustiff-binary-ccitt-group4.tif"), getClassLoaderResource("/tiff/balloons.tif"), getClassLoaderResource("/tiff/ColorCheckerCalculator.tif"), getClassLoaderResource("/tiff/quad-jpeg.tif"), getClassLoaderResource("/tiff/quad-lzw.tif"), getClassLoaderResource("/tiff/bali.tif"), getClassLoaderResource("/tiff/lzw-colormap-iiobe.tif"), // TODO: FixMe for ColorMap + ExtraSamples (custom ColorModel) // getClassLoaderResource("/tiff/colormap-with-extrasamples.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-02.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-04.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-06.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-08.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-10.tif"), 
getClassLoaderResource("/tiff/depth/flower-minisblack-12.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-14.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-16.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-24.tif"), getClassLoaderResource("/tiff/depth/flower-minisblack-32.tif"), getClassLoaderResource("/tiff/depth/flower-palette-02.tif"), getClassLoaderResource("/tiff/depth/flower-palette-04.tif"), getClassLoaderResource("/tiff/depth/flower-palette-08.tif"), getClassLoaderResource("/tiff/depth/flower-palette-16.tif"), getClassLoaderResource("/tiff/depth/flower-rgb-contig-08.tif"), // TODO: FixMe for RGB > 8 bits / sample // getClassLoaderResource("/tiff/depth/flower-rgb-contig-10.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-contig-12.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-contig-14.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-contig-16.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-contig-24.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-contig-32.tif"), getClassLoaderResource("/tiff/depth/flower-rgb-planar-08.tif"), // TODO: FixMe for planar RGB > 8 bits / sample // getClassLoaderResource("/tiff/depth/flower-rgb-planar-10.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-planar-12.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-planar-14.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-planar-16.tif"), // getClassLoaderResource("/tiff/depth/flower-rgb-planar-24.tif"), getClassLoaderResource("/tiff/scan-mono-iccgray.tif"), getClassLoaderResource("/tiff/old-style-jpeg-inconsistent-metadata.tif"), getClassLoaderResource("/tiff/ccitt/group3_1d.tif"), getClassLoaderResource("/tiff/ccitt/group3_2d.tif"), getClassLoaderResource("/tiff/ccitt/group3_1d_fill.tif"), getClassLoaderResource("/tiff/ccitt/group3_2d_fill.tif"), getClassLoaderResource("/tiff/ccitt/group4.tif") ); for (URL url : testData) { ByteArrayOutputStream output = new 
ByteArrayOutputStream(); try (ImageInputStream input = ImageIO.createImageInputStream(url); ImageOutputStream stream = ImageIO.createImageOutputStream(output)) { reader.setInput(input); writer.setOutput(stream); List<ImageInfo> infos = new ArrayList<>(20); writer.prepareWriteSequence(null); for (int i = 0; i < reader.getNumImages(true); i++) { IIOImage image = reader.readAll(i, null); // If compression is Old JPEG, rewrite as JPEG // Normally, use the getAsTree method, but we don't care here if we are tied to our impl TIFFImageMetadata metadata = (TIFFImageMetadata) image.getMetadata(); Directory ifd = metadata.getIFD(); Entry compressionEntry = ifd.getEntryById(TIFF.TAG_COMPRESSION); int compression = compressionEntry != null ? ((Number) compressionEntry.getValue()).intValue() : TIFFBaseline.COMPRESSION_NONE; infos.add(new ImageInfo(image.getRenderedImage().getWidth(), image.getRenderedImage().getHeight(), compression)); ImageWriteParam param = writer.getDefaultWriteParam(); if (compression == TIFFExtension.COMPRESSION_OLD_JPEG) { param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); // Override the copy from metadata param.setCompressionType("JPEG"); } writer.writeToSequence(image, param); } writer.endWriteSequence(); // File tempFile = File.createTempFile("foo-", ".tif"); // System.err.println("open " + tempFile.getAbsolutePath()); // FileUtil.write(tempFile, output.toByteArray()); try (ImageInputStream inputAfter = new ByteArrayImageInputStream(output.toByteArray())) { reader.setInput(inputAfter); int numImages = reader.getNumImages(true); assertEquals("Number of pages differs from original", infos.size(), numImages); for (int i = 0; i < numImages; i++) { IIOImage after = reader.readAll(i, null); ImageInfo info = infos.get(i); TIFFImageMetadata afterMetadata = (TIFFImageMetadata) after.getMetadata(); Directory afterIfd = afterMetadata.getIFD(); Entry afterCompressionEntry = afterIfd.getEntryById(TIFF.TAG_COMPRESSION); if (info.compression == 
TIFFExtension.COMPRESSION_OLD_JPEG) { // Should rewrite this from old-style to new style assertEquals("Old JPEG compression not rewritten as JPEG", TIFFExtension.COMPRESSION_JPEG, ((Number) afterCompressionEntry.getValue()).intValue()); } else { assertEquals("Compression differs from original", info.compression, ((Number) afterCompressionEntry.getValue()).intValue()); } assertEquals("Image width differs from original", info.width, after.getRenderedImage().getWidth()); assertEquals("Image height differs from original", info.height, after.getRenderedImage().getHeight()); } } } } } private static class ImageInfo { final int width; final int height; final int compression; private ImageInfo(int width, int height, int compression) { this.width = width; this.height = height; this.compression = compression; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.stram; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeState; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.api.AMRMClient; import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.util.Records; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest; import com.datatorrent.stram.plan.physical.PTContainer; import com.datatorrent.stram.plan.physical.PTOperator; import com.datatorrent.stram.plan.physical.PTOperator.HostOperatorSet; /** * Handle mapping from physical plan locality groupings to resource allocation requests. Monitors available resources * through node reports. 
* * @since 0.3.4 */ public class ResourceRequestHandler { private static final Logger LOG = LoggerFactory.getLogger(ResourceRequestHandler.class); private static final String INVALID_HOST = "INVALID_HOST"; protected static final int NUMBER_MISSED_HEARTBEATS = 30; public ResourceRequestHandler() { super(); } /** * Issue requests to AM RM Client again if previous container requests expired and were not allocated by Yarn * @param amRmClient * @param requestedResources * @param loopCounter * @param resourceRequestor * @param containerRequests * @param removedContainerRequests */ public void reissueContainerRequests(AMRMClient<ContainerRequest> amRmClient, Map<StreamingContainerAgent.ContainerStartRequest, MutablePair<Integer, ContainerRequest>> requestedResources, int loopCounter, ResourceRequestHandler resourceRequestor, List<ContainerRequest> containerRequests, List<ContainerRequest> removedContainerRequests) { if (!requestedResources.isEmpty()) { for (Map.Entry<StreamingContainerAgent.ContainerStartRequest, MutablePair<Integer, ContainerRequest>> entry : requestedResources.entrySet()) { /* * Create container requests again if pending requests were not allocated by Yarn till timeout. 
*/ if ((loopCounter - entry.getValue().getKey()) > NUMBER_MISSED_HEARTBEATS) { StreamingContainerAgent.ContainerStartRequest csr = entry.getKey(); removedContainerRequests.add(entry.getValue().getRight()); ContainerRequest cr = resourceRequestor.createContainerRequest(csr, false); entry.getValue().setLeft(loopCounter); entry.getValue().setRight(cr); containerRequests.add(cr); } } } } /** * Add container request to list of issued requests to Yarn along with current loop counter * @param requestedResources * @param loopCounter * @param containerRequests * @param csr * @param cr */ public void addContainerRequest(Map<StreamingContainerAgent.ContainerStartRequest, MutablePair<Integer, ContainerRequest>> requestedResources, int loopCounter, List<ContainerRequest> containerRequests, StreamingContainerAgent.ContainerStartRequest csr, ContainerRequest cr) { MutablePair<Integer, ContainerRequest> pair = new MutablePair<Integer, ContainerRequest>(loopCounter, cr); requestedResources.put(csr, pair); containerRequests.add(cr); } /** * Setup the request(s) that will be sent to the RM for the container ask. 
*/ public ContainerRequest createContainerRequest(ContainerStartRequest csr, boolean first) { int priority = csr.container.getResourceRequestPriority(); // check for node locality constraint String[] nodes = null; String[] racks = null; String host = getHost(csr, first); Resource capability = Records.newRecord(Resource.class); capability.setMemory(csr.container.getRequiredMemoryMB()); capability.setVirtualCores(csr.container.getRequiredVCores()); if (host == INVALID_HOST) { return null; } if (host != null) { nodes = new String[]{host}; // in order to request a host, we don't have to set the rack if the locality is false /* * if(this.nodeToRack.get(host) != null){ racks = new String[] { this.nodeToRack.get(host) }; } */ return new ContainerRequest(capability, nodes, racks, Priority.newInstance(priority), false); } // For now, only memory is supported so we set memory requirements return new ContainerRequest(capability, nodes, racks, Priority.newInstance(priority)); } private final Map<String, NodeReport> nodeReportMap = Maps.newHashMap(); private final Map<Set<PTOperator>, String> nodeLocalMapping = Maps.newHashMap(); private final Map<String, String> nodeToRack = Maps.newHashMap(); private final Map<PTContainer, String> antiAffinityMapping = Maps.newHashMap(); public void clearNodeMapping() { nodeLocalMapping.clear(); } /** * Tracks update to available resources. Resource availability is used to make decisions about where to request new * containers. 
* * @param nodeReports */ public void updateNodeReports(List<NodeReport> nodeReports) { // LOG.debug("Got {} updated node reports.", nodeReports.size()); for (NodeReport nr : nodeReports) { StringBuilder sb = new StringBuilder(); sb.append("rackName=").append(nr.getRackName()).append(",nodeid=").append(nr.getNodeId()).append(",numContainers=").append(nr.getNumContainers()).append(",capability=").append(nr.getCapability()).append("used=").append(nr.getUsed()).append("state=").append(nr.getNodeState()); LOG.info("Node report: " + sb); nodeReportMap.put(nr.getNodeId().getHost(), nr); nodeToRack.put(nr.getNodeId().getHost(), nr.getRackName()); } } public List<String> getNodesExceptHost(List<String> hostNames) { List<String> nodesList = new ArrayList<String>(); Set<String> hostNameSet = Sets.newHashSet(); hostNameSet.addAll(hostNames); for (String host : nodeReportMap.keySet()) { // Split node name and port String[] parts = host.split(":"); if (parts.length > 0) { if (hostNameSet.contains(parts[0]) || hostNameSet.contains(host)) { continue; } nodesList.add(parts[0]); } } return nodesList; } public String getHost(ContainerStartRequest csr, boolean first) { String host = null; PTContainer c = csr.container; if (first) { for (PTOperator oper : c.getOperators()) { HostOperatorSet grpObj = oper.getNodeLocalOperators(); host = nodeLocalMapping.get(grpObj.getOperatorSet()); if (host != null) { antiAffinityMapping.put(c, host); return host; } if (grpObj.getHost() != null) { host = grpObj.getHost(); // using the 1st host value as host for container break; } } if (host != null && nodeReportMap.get(host) != null) { for (PTOperator oper : c.getOperators()) { HostOperatorSet grpObj = oper.getNodeLocalOperators(); Set<PTOperator> nodeLocalSet = grpObj.getOperatorSet(); NodeReport report = nodeReportMap.get(host); int aggrMemory = c.getRequiredMemoryMB(); int vCores = c.getRequiredVCores(); Set<PTContainer> containers = Sets.newHashSet(); containers.add(c); for (PTOperator 
nodeLocalOper : nodeLocalSet) { if (!containers.contains(nodeLocalOper.getContainer())) { aggrMemory += nodeLocalOper.getContainer().getRequiredMemoryMB(); vCores += nodeLocalOper.getContainer().getRequiredVCores(); containers.add(nodeLocalOper.getContainer()); } } int memAvailable = report.getCapability().getMemory() - report.getUsed().getMemory(); int vCoresAvailable = report.getCapability().getVirtualCores() - report.getUsed().getVirtualCores(); if (memAvailable >= aggrMemory && vCoresAvailable >= vCores) { nodeLocalMapping.put(nodeLocalSet, host); antiAffinityMapping.put(c, host); return host; } } } } // the host requested didn't have the resources so looking for other hosts host = null; List<String> antiHosts = new ArrayList<>(); List<String> antiPreferredHosts = new ArrayList<>(); if (!c.getStrictAntiPrefs().isEmpty()) { // Check if containers are allocated already for the anti-affinity containers populateAntiHostList(c, antiHosts); } if (!c.getPreferredAntiPrefs().isEmpty()) { populateAntiHostList(c, antiPreferredHosts); } LOG.info("Strict anti-affinity = {} for container with operators {}", antiHosts, StringUtils.join(c.getOperators(), ",")); for (PTOperator oper : c.getOperators()) { HostOperatorSet grpObj = oper.getNodeLocalOperators(); Set<PTOperator> nodeLocalSet = grpObj.getOperatorSet(); if (nodeLocalSet.size() > 1 || !c.getStrictAntiPrefs().isEmpty() || !c.getPreferredAntiPrefs().isEmpty()) { LOG.info("Finding new host for {}", nodeLocalSet); int aggrMemory = c.getRequiredMemoryMB(); int vCores = c.getRequiredVCores(); Set<PTContainer> containers = Sets.newHashSet(); containers.add(c); // aggregate memory required for all containers for (PTOperator nodeLocalOper : nodeLocalSet) { if (!containers.contains(nodeLocalOper.getContainer())) { aggrMemory += nodeLocalOper.getContainer().getRequiredMemoryMB(); vCores += nodeLocalOper.getContainer().getRequiredVCores(); containers.add(nodeLocalOper.getContainer()); } } host = assignHost(host, antiHosts, 
antiPreferredHosts, grpObj, nodeLocalSet, aggrMemory, vCores); if (host == null && !antiPreferredHosts.isEmpty() && !antiHosts.isEmpty()) { // Drop the preferred constraint and try allocation antiPreferredHosts.clear(); host = assignHost(host, antiHosts, antiPreferredHosts, grpObj, nodeLocalSet, aggrMemory, vCores); } if (host != null) { antiAffinityMapping.put(c, host); } else { host = INVALID_HOST; } } } LOG.info("Found host {}", host); return host; } /** * Populate list of nodes where container cannot be allocated due to anti-affinity constraints * @param c container * @param antiHosts List of nodes where container cannot be allocated */ public void populateAntiHostList(PTContainer c, List<String> antiHosts) { for (PTContainer container : c.getStrictAntiPrefs()) { if (antiAffinityMapping.containsKey(container)) { antiHosts.add(antiAffinityMapping.get(container)); } else { // Check if there is an anti-affinity with host locality String antiHost = getHostForContainer(container); if (antiHost != null) { antiHosts.add(antiHost); } } } } /** * Get host name where container would be allocated give node local constraints * @param container * @return */ public String getHostForContainer(PTContainer container) { for (PTOperator oper : container.getOperators()) { HostOperatorSet grpObj = oper.getNodeLocalOperators(); String host = nodeLocalMapping.get(grpObj.getOperatorSet()); if (host != null) { return host; } if (grpObj.getHost() != null) { host = grpObj.getHost(); return host; } } return null; } /** * Assign host to container given affinity and anti-affinity constraints and resource availibility on node * @param host * @param antiHosts * @param antiPreferredHosts * @param grpObj * @param nodeLocalSet * @param aggrMemory * @param vCores * @return */ public String assignHost(String host, List<String> antiHosts, List<String> antiPreferredHosts, HostOperatorSet grpObj, Set<PTOperator> nodeLocalSet, int aggrMemory, int vCores) { for (Map.Entry<String, NodeReport> nodeEntry 
: nodeReportMap.entrySet()) { if (nodeEntry.getValue().getNodeState() == NodeState.RUNNING) { int memAvailable = nodeEntry.getValue().getCapability().getMemory() - nodeEntry.getValue().getUsed().getMemory(); int vCoresAvailable = nodeEntry.getValue().getCapability().getVirtualCores() - nodeEntry.getValue().getUsed().getVirtualCores(); if (memAvailable >= aggrMemory && vCoresAvailable >= vCores && !antiHosts.contains(nodeEntry.getKey()) && !antiPreferredHosts.contains(nodeEntry.getKey())) { host = nodeEntry.getKey(); grpObj.setHost(host); nodeLocalMapping.put(nodeLocalSet, host); return host; } } } return null; } }
package ch.epfl.bigdata.ts.genalg;

import ch.epfl.bigdata.ts.dataparser.Tick;
import ch.epfl.bigdata.ts.ga.util.Range;
import ch.epfl.bigdata.ts.pattern.fitness.StockParameters;

import java.util.List;
import java.util.Random;

/**
 * One candidate trading strategy for the genetic algorithm. The chromosome is a bit string
 * ({@code genes}); each logical gene of {@code Constants.GENE_LENGTH} bits is decoded by
 * {@link #transform_gene(int)} into a value within a configured {@link Range}.
 *
 * Trading strategy implemented by {@link #trade(Tick)}: detect a bottom, a top and a second
 * bottom; buy on a rebound from the second bottom; then sell when the price crosses either the
 * protective loss level or the gain level derived from the decoded genes.
 */
public class Individual {

    // Indices of the logical genes within the chromosome.
    public static final int GENE_BOTTOM_1 = 0;
    public static final int GENE_BOTTOM_2 = 1;
    public static final int GENE_PROTECT_SELL_GAIN = 2;
    public static final int GENE_PROTECT_SELL_LOSS = 3;
    public static final int GENE_TREND_STRENGTH = 4;

    // Sell-state constants. Made final: they were mutable public statics, which would let any
    // caller corrupt the state machine of every Individual.
    public static final int DO_NOT_SELL = 11;
    public static final int SELL_WITH_LOSS = 22;
    public static final int SELL_WITH_GAIN = 33;

    // Pending sell decision, acted upon at the start of the next trade() call.
    protected int sell = DO_NOT_SELL;
    // Set when a buy has been decided; executed on the next trade() tick.
    boolean buy = false;

    // Chromosome bits (0/1 stored as doubles); length is total bit count.
    private double[] genes = new double[Constants.NUMBER_OF_GENES];

    /* Variables important for the trading strategy. */
    private double bottom1;      // first detected bottom price (-1 = not yet seen)
    private double bottom2;      // second bottom price (-1 = not yet seen)
    private double top;          // detected top price (-1 = not yet seen)
    private boolean openPosition = false;
    private double lastPrice;
    private double sellLoss;     // protective stop-loss price (-1 = unset)
    private double sellGain;     // take-profit price (-1 = unset)
    private double amount;       // cash on hand
    private int numOfShares;     // shares currently held
    private StockParameters sp;  // initialized in reset(); trade() before reset() would NPE

    // Shared RNG for gene generation.
    static Random r = new Random(System.currentTimeMillis());

    /** Create a random individual by randomizing every chromosome bit. */
    public void generateIndividual() {
        for (int i = 0; i < Constants.NUMBER_OF_GENES; i++) {
            generate_gene(i);
        }
    }

    /**
     * Decode logical gene {@code index}: interpret its GENE_LENGTH bits as an unsigned integer and
     * scale it linearly into the gene's configured [lower, upper] range.
     */
    public double transform_gene(int index) {
        double v = 0;
        for (int i = index * Constants.GENE_LENGTH; i < (index + 1) * Constants.GENE_LENGTH; i++) {
            v = v * 2 + genes[i];
        }
        // Renamed from "r", which shadowed the static Random field of the same name.
        List<Range> ranges = Constants.getGeneRanges();
        Range range = ranges.get(index);
        return range.getLower()
                + (range.getUpper() - range.getLower()) * (v / Math.pow(2, Constants.GENE_LENGTH));
    }

    /** Randomize a single chromosome bit to 0 or 1. */
    public void generate_gene(int index) {
        genes[index] = r.nextInt(2);
    }

    /** Reset all trading state so the individual can be evaluated from scratch. */
    public void reset() {
        bottom1 = -1;
        bottom2 = -1;
        top = -1;
        openPosition = false;
        lastPrice = 0;
        sellLoss = -1;
        sellGain = -1;
        sp = new StockParameters(true);
        amount = Constants.STARTING_MONEY;
        // Fix: previously numOfShares/sell/buy were not reset, so a position left open by the
        // previous evaluation leaked into getFitness() and the next trading run.
        numOfShares = 0;
        sell = DO_NOT_SELL;
        buy = false;
    }

    /* Getters and setters */

    /** @return the raw bit at {@code index} of the chromosome */
    public double getGene(int index) {
        return genes[index];
    }

    /** @return the last observed price */
    public double getLastPrice() {
        return lastPrice;
    }

    /** Overwrite the cash on hand. */
    public void setAmount(double value) {
        amount = value;
    }

    /** Set the raw bit at {@code index} of the chromosome. */
    public void setGene(int index, double value) {
        genes[index] = value;
    }

    /* Public methods */

    /** @return the chromosome length in bits */
    public int size() {
        return genes.length;
    }

    /** Fitness = liquidation value: cash plus held shares at the last seen price. */
    public double getFitness() {
        return amount + numOfShares * lastPrice;
    }

    /*
     * The implemented strategy:
     * detect bottom1, then a top, then bottom2; on a rebound from bottom2, buy;
     * afterwards sell when the price crosses the protective loss or gain level
     * derived from the decoded genes.
     */

    /**
     * Process one market tick.
     *
     * @param transaction the incoming tick (timestamp + price)
     * @return 1 if a buy or sell was executed on this tick, 0 otherwise
     */
    protected int trade(Tick transaction) {
        int toRet = 0;
        lastPrice = transaction.getPrice();
        sp.calculate(transaction.getTimestamp(), lastPrice);

        // Execute a sell decided on the previous tick.
        if (sell == SELL_WITH_LOSS) {
            sell();
            toRet = 1;
        } else if (sell == SELL_WITH_GAIN) {
            double tmp = top;   // sell() clears top; preserve it for the next cycle
            sell();
            toRet = 1;
            top = tmp;
        }

        if (openPosition) {
            if (buy) {
                // Execute the buy decided on the previous tick.
                buy = false;
                numOfShares = (int) Math.floor(amount / lastPrice);
                amount -= numOfShares * lastPrice;
                double avg = top - bottom1 + top - bottom2;
                avg /= 2;
                sellLoss = lastPrice - transform_gene(GENE_PROTECT_SELL_LOSS) * avg;
                sellGain = lastPrice + transform_gene(GENE_PROTECT_SELL_GAIN) * avg;
                toRet = 1;
            } else {
                if (lastPrice <= sellLoss) {
                    bottom1 = lastPrice;
                    sell = SELL_WITH_LOSS;
                } else if (lastPrice >= sellGain) {
                    sell = SELL_WITH_GAIN;
                    bottom1 = bottom2;
                    if ((lastPrice - bottom1) >= transform_gene(GENE_BOTTOM_1)) {
                        top = lastPrice;
                    }
                }
            }
        } else {
            if (bottom1 == -1) {
                // First price seen becomes the initial bottom candidate.
                bottom1 = lastPrice;
            } else if (top == -1) {
                if (lastPrice < bottom1) {
                    bottom1 = lastPrice;
                } else if ((lastPrice - bottom1) >= transform_gene(GENE_BOTTOM_1)) {
                    top = lastPrice;
                }
            } else if (bottom2 == -1) {
                if (lastPrice > top) {
                    top = lastPrice;
                } else if ((top - lastPrice) >= transform_gene(GENE_BOTTOM_2)) {
                    bottom2 = lastPrice;
                }
            } else {
                if (lastPrice < bottom2) {
                    bottom2 = lastPrice;
                } else {
                    // Price rebounded above the second bottom: buy on the next tick.
                    openPosition = true;
                    buy = true;
                }
            }
        }
        return toRet;
    }

    /** Liquidate the position: convert shares to cash and clear the per-cycle pattern state. */
    private void sell() {
        openPosition = false;
        sell = DO_NOT_SELL;
        amount += numOfShares * lastPrice;
        numOfShares = 0;
        bottom2 = top = -1;
        sellLoss = -1;
        sellGain = -1;
    }

    /** @return the decoded gene values, space-separated (same format as before). */
    @Override
    public String toString() {
        StringBuilder geneString = new StringBuilder();
        for (int i = 0; i < Constants.numberOfUnits(); i++) {
            geneString.append(transform_gene(i)).append(' ');
        }
        return geneString.toString();
    }
}
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2011 by Red Hat Inc and/or its affiliates or by
 * third-party contributors as indicated by either @author tags or express
 * copyright attribution statements applied by the authors. All
 * third-party contributions are distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA 02110-1301 USA
 */
package org.hibernate.test.annotations.xml.ejb3;

import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.AssociationOverride;
import javax.persistence.AssociationOverrides;
import javax.persistence.AttributeOverride;
import javax.persistence.AttributeOverrides;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.Lob;
import javax.persistence.MapKey;
import javax.persistence.MapKeyClass;
import javax.persistence.MapKeyColumn;
import javax.persistence.MapKeyEnumerated;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.MapKeyJoinColumns;
import javax.persistence.MapKeyTemporal;
import javax.persistence.OrderBy;
import javax.persistence.OrderColumn;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;

/**
 * Tests the translation of {@code <element-collection>} orm.xml mappings into
 * the equivalent JPA annotations.
 * <p>
 * Each test method loads one fixture mapping ({@code element-collection.ormN.xml})
 * through {@code getReader(...)} — inherited from {@code Ejb3XmlTestCase}, which
 * presumably builds a JPA-metadata annotation reader for the named attribute of
 * the given entity class (TODO confirm against the base class) — and then asserts
 * exactly which annotations are synthesized and with which attribute values.
 * Assertion helpers ({@code assertAnnotationPresent}/{@code assertAnnotationNotPresent})
 * and the {@code reader} field also come from the base class.
 */
public class Ejb3XmlElementCollectionTest extends Ejb3XmlTestCase {
	// An empty <element-collection> element: only @ElementCollection itself is
	// generated, with its default fetch/targetClass values.
	public void testNoChildren() throws Exception {
		reader = getReader( Entity2.class, "field1", "element-collection.orm1.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( OrderBy.class );
		assertAnnotationNotPresent( OrderColumn.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertAnnotationNotPresent( Column.class );
		assertAnnotationNotPresent( Temporal.class );
		assertAnnotationNotPresent( Enumerated.class );
		assertAnnotationNotPresent( Lob.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationNotPresent( AttributeOverrides.class );
		assertAnnotationNotPresent( AssociationOverride.class );
		assertAnnotationNotPresent( AssociationOverrides.class );
		assertAnnotationNotPresent( CollectionTable.class );
		assertAnnotationNotPresent( Access.class );
		ElementCollection relAnno = reader.getAnnotation( ElementCollection.class );
		assertEquals( FetchType.LAZY, relAnno.fetch() );
		assertEquals( void.class, relAnno.targetClass() );
	}

	public void testOrderBy() throws Exception {
		reader = getReader( Entity2.class, "field1", "element-collection.orm2.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( OrderBy.class );
		assertAnnotationNotPresent( OrderColumn.class );
		assertEquals( "col1 ASC, col2 DESC", reader.getAnnotation( OrderBy.class )
				.value() );
	}

	// <order-column/> with no attributes: all @OrderColumn members take their
	// spec defaults (empty strings, insertable/nullable/updatable = true).
	public void testOrderColumnNoAttributes() throws Exception {
		reader = getReader( Entity2.class, "field1", "element-collection.orm3.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( OrderBy.class );
		assertAnnotationPresent( OrderColumn.class );
		OrderColumn orderColumnAnno = reader.getAnnotation( OrderColumn.class );
		assertEquals( "", orderColumnAnno.columnDefinition() );
		assertEquals( "", orderColumnAnno.name() );
		assertTrue( orderColumnAnno.insertable() );
		assertTrue( orderColumnAnno.nullable() );
		assertTrue( orderColumnAnno.updatable() );
	}

	public void testOrderColumnAllAttributes() throws Exception {
		reader = getReader( Entity2.class, "field1", "element-collection.orm4.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( OrderBy.class );
		assertAnnotationPresent( OrderColumn.class );
		OrderColumn orderColumnAnno = reader.getAnnotation( OrderColumn.class );
		assertEquals( "int", orderColumnAnno.columnDefinition() );
		assertEquals( "col1", orderColumnAnno.name() );
		assertFalse( orderColumnAnno.insertable() );
		assertFalse( orderColumnAnno.nullable() );
		assertFalse( orderColumnAnno.updatable() );
	}

	public void testMapKeyNoAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm5.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertEquals( "", reader.getAnnotation( MapKey.class ).name() );
	}

	public void testMapKeyAllAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm6.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertEquals( "field2", reader.getAnnotation( MapKey.class ).name() );
	}

	public void testMapKeyClass() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm7.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertEquals( Entity2.class, reader.getAnnotation( MapKeyClass.class )
				.value() );
	}

	public void testMapKeyTemporal() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm8.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertEquals( TemporalType.DATE,
				reader.getAnnotation( MapKeyTemporal.class ).value() );
	}

	public void testMapKeyEnumerated() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm9.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertEquals( EnumType.STRING,
				reader.getAnnotation( MapKeyEnumerated.class ).value() );
	}

	/**
	 * When there's a single map key attribute override, we still wrap it with
	 * an AttributeOverrides annotation.
	 */
	public void testSingleMapKeyAttributeOverride() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm10.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationPresent( AttributeOverrides.class );
		AttributeOverrides overridesAnno = reader
				.getAnnotation( AttributeOverrides.class );
		AttributeOverride[] overrides = overridesAnno.value();
		assertEquals( 1, overrides.length );
		assertEquals( "field1", overrides[0].name() );
		assertEquals( "col1", overrides[0].column().name() );
	}

	// overrides[0] exercises the default @Column values, overrides[1] an
	// override with every column attribute set explicitly.
	public void testMultipleMapKeyAttributeOverrides() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm11.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationPresent( AttributeOverrides.class );
		AttributeOverrides overridesAnno = reader
				.getAnnotation( AttributeOverrides.class );
		AttributeOverride[] overrides = overridesAnno.value();
		assertEquals( 2, overrides.length );
		assertEquals( "field1", overrides[0].name() );
		assertEquals( "", overrides[0].column().name() );
		assertFalse( overrides[0].column().unique() );
		assertTrue( overrides[0].column().nullable() );
		assertTrue( overrides[0].column().insertable() );
		assertTrue( overrides[0].column().updatable() );
		assertEquals( "", overrides[0].column().columnDefinition() );
		assertEquals( "", overrides[0].column().table() );
		assertEquals( 255, overrides[0].column().length() );
		assertEquals( 0, overrides[0].column().precision() );
		assertEquals( 0, overrides[0].column().scale() );
		assertEquals( "field2", overrides[1].name() );
		assertEquals( "col1", overrides[1].column().name() );
		assertTrue( overrides[1].column().unique() );
		assertFalse( overrides[1].column().nullable() );
		assertFalse( overrides[1].column().insertable() );
		assertFalse( overrides[1].column().updatable() );
		assertEquals( "int", overrides[1].column().columnDefinition() );
		assertEquals( "table1", overrides[1].column().table() );
		assertEquals( 50, overrides[1].column().length() );
		assertEquals( 2, overrides[1].column().precision() );
		assertEquals( 1, overrides[1].column().scale() );
	}

	// Note: unlike @Column, the expected default for @MapKeyColumn.nullable()
	// asserted here is false.
	public void testMapKeyColumnNoAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm12.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		MapKeyColumn keyColAnno = reader.getAnnotation( MapKeyColumn.class );
		assertEquals( "", keyColAnno.columnDefinition() );
		assertEquals( "", keyColAnno.name() );
		assertEquals( "", keyColAnno.table() );
		assertFalse( keyColAnno.nullable() );
		assertTrue( keyColAnno.insertable() );
		assertFalse( keyColAnno.unique() );
		assertTrue( keyColAnno.updatable() );
		assertEquals( 255, keyColAnno.length() );
		assertEquals( 0, keyColAnno.precision() );
		assertEquals( 0, keyColAnno.scale() );
	}

	public void testMapKeyColumnAllAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm13.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		MapKeyColumn keyColAnno = reader.getAnnotation( MapKeyColumn.class );
		assertEquals( "int", keyColAnno.columnDefinition() );
		assertEquals( "col1", keyColAnno.name() );
		assertEquals( "table1", keyColAnno.table() );
		assertTrue( keyColAnno.nullable() );
		assertFalse( keyColAnno.insertable() );
		assertTrue( keyColAnno.unique() );
		assertFalse( keyColAnno.updatable() );
		assertEquals( 50, keyColAnno.length() );
		assertEquals( 2, keyColAnno.precision() );
		assertEquals( 1, keyColAnno.scale() );
	}

	/**
	 * When there's a single map key join column, we still wrap it with a
	 * MapKeyJoinColumns annotation.
	 */
	public void testSingleMapKeyJoinColumn() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm14.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		MapKeyJoinColumns joinColumnsAnno = reader
				.getAnnotation( MapKeyJoinColumns.class );
		MapKeyJoinColumn[] joinColumns = joinColumnsAnno.value();
		assertEquals( 1, joinColumns.length );
		assertEquals( "col1", joinColumns[0].name() );
	}

	public void testMultipleMapKeyJoinColumns() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm15.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		MapKeyJoinColumns joinColumnsAnno = reader
				.getAnnotation( MapKeyJoinColumns.class );
		MapKeyJoinColumn[] joinColumns = joinColumnsAnno.value();
		assertEquals( 2, joinColumns.length );
		assertEquals( "", joinColumns[0].name() );
		assertEquals( "", joinColumns[0].referencedColumnName() );
		assertFalse( joinColumns[0].unique() );
		assertFalse( joinColumns[0].nullable() );
		assertTrue( joinColumns[0].insertable() );
		assertTrue( joinColumns[0].updatable() );
		assertEquals( "", joinColumns[0].columnDefinition() );
		assertEquals( "", joinColumns[0].table() );
		assertEquals( "col1", joinColumns[1].name() );
		assertEquals( "col2", joinColumns[1].referencedColumnName() );
		assertTrue( joinColumns[1].unique() );
		assertTrue( joinColumns[1].nullable() );
		assertFalse( joinColumns[1].insertable() );
		assertFalse( joinColumns[1].updatable() );
		assertEquals( "int", joinColumns[1].columnDefinition() );
		assertEquals( "table1", joinColumns[1].table() );
	}

	public void testColumnNoAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm16.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( Column.class );
		Column column = reader.getAnnotation( Column.class );
		assertEquals( "", column.name() );
		assertFalse( column.unique() );
		assertTrue( column.nullable() );
		assertTrue( column.insertable() );
		assertTrue( column.updatable() );
		assertEquals( "", column.columnDefinition() );
		assertEquals( "", column.table() );
		assertEquals( 255, column.length() );
		assertEquals( 0, column.precision() );
		assertEquals( 0, column.scale() );
	}

	public void testColumnAllAttributes() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm17.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( Column.class );
		Column column = reader.getAnnotation( Column.class );
		assertEquals( "col1", column.name() );
		assertTrue( column.unique() );
		assertFalse( column.nullable() );
		assertFalse( column.insertable() );
		assertFalse( column.updatable() );
		assertEquals( "int", column.columnDefinition() );
		assertEquals( "table1", column.table() );
		assertEquals( 50, column.length() );
		assertEquals( 2, column.precision() );
		assertEquals( 1, column.scale() );
	}

	public void testTemporal() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm18.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( Temporal.class );
		assertAnnotationNotPresent( Enumerated.class );
		assertAnnotationNotPresent( Lob.class );
		assertEquals( TemporalType.DATE,
				reader.getAnnotation( Temporal.class ).value() );
	}

	public void testEnumerated() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm19.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( Temporal.class );
		assertAnnotationPresent( Enumerated.class );
		assertAnnotationNotPresent( Lob.class );
		assertEquals( EnumType.STRING,
				reader.getAnnotation( Enumerated.class ).value() );
	}

	public void testLob() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm20.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( Temporal.class );
		assertAnnotationNotPresent( Enumerated.class );
		assertAnnotationPresent( Lob.class );
	}

	/**
	 * When there's a single attribute override, we still wrap it with an
	 * AttributeOverrides annotation.
	 */
	public void testSingleAttributeOverride() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm21.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationPresent( AttributeOverrides.class );
		AttributeOverrides overridesAnno = reader
				.getAnnotation( AttributeOverrides.class );
		AttributeOverride[] overrides = overridesAnno.value();
		assertEquals( 1, overrides.length );
		assertEquals( "field1", overrides[0].name() );
		assertEquals( "col1", overrides[0].column().name() );
	}

	public void testMultipleAttributeOverrides() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm22.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationPresent( AttributeOverrides.class );
		AttributeOverrides overridesAnno = reader
				.getAnnotation( AttributeOverrides.class );
		AttributeOverride[] overrides = overridesAnno.value();
		assertEquals( 2, overrides.length );
		assertEquals( "field1", overrides[0].name() );
		assertEquals( "", overrides[0].column().name() );
		assertFalse( overrides[0].column().unique() );
		assertTrue( overrides[0].column().nullable() );
		assertTrue( overrides[0].column().insertable() );
		assertTrue( overrides[0].column().updatable() );
		assertEquals( "", overrides[0].column().columnDefinition() );
		assertEquals( "", overrides[0].column().table() );
		assertEquals( 255, overrides[0].column().length() );
		assertEquals( 0, overrides[0].column().precision() );
		assertEquals( 0, overrides[0].column().scale() );
		assertEquals( "field2", overrides[1].name() );
		assertEquals( "col1", overrides[1].column().name() );
		assertTrue( overrides[1].column().unique() );
		assertFalse( overrides[1].column().nullable() );
		assertFalse( overrides[1].column().insertable() );
		assertFalse( overrides[1].column().updatable() );
		assertEquals( "int", overrides[1].column().columnDefinition() );
		assertEquals( "table1", overrides[1].column().table() );
		assertEquals( 50, overrides[1].column().length() );
		assertEquals( 2, overrides[1].column().precision() );
		assertEquals( 1, overrides[1].column().scale() );
	}

	/**
	 * Tests that map-key-attribute-override and attribute-override elements
	 * both end up in the AttributeOverrides annotation.
	 */
	public void testMixedAttributeOverrides() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm23.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationPresent( AttributeOverrides.class );
		AttributeOverrides overridesAnno = reader
				.getAnnotation( AttributeOverrides.class );
		AttributeOverride[] overrides = overridesAnno.value();
		assertEquals( 2, overrides.length );
		assertEquals( "field1", overrides[0].name() );
		assertEquals( "col1", overrides[0].column().name() );
		assertEquals( "field2", overrides[1].name() );
		assertEquals( "col2", overrides[1].column().name() );
	}

	/**
	 * When there's a single association override, we still wrap it with an
	 * AssociationOverrides annotation.
	 */
	public void testSingleAssociationOverride() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm24.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( AssociationOverride.class );
		assertAnnotationPresent( AssociationOverrides.class );
		AssociationOverrides overridesAnno = reader.getAnnotation(
				AssociationOverrides.class );
		AssociationOverride[] overrides = overridesAnno.value();
		assertEquals( 1, overrides.length );
		assertEquals( "association1", overrides[0].name() );
		assertEquals( 0, overrides[0].joinColumns().length );
		assertEquals( "", overrides[0].joinTable().name() );
	}

	public void testMultipleAssociationOverridesJoinColumns() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm25.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( AssociationOverride.class );
		assertAnnotationPresent( AssociationOverrides.class );
		AssociationOverrides overridesAnno = reader.getAnnotation(
				AssociationOverrides.class );
		AssociationOverride[] overrides = overridesAnno.value();
		assertEquals( 2, overrides.length );
		//First, an association using join table
		assertEquals( "association1", overrides[0].name() );
		assertEquals( 0, overrides[0].joinColumns().length );
		JoinTable joinTableAnno = overrides[0].joinTable();
		assertEquals( "catalog1", joinTableAnno.catalog() );
		assertEquals( "table1", joinTableAnno.name() );
		assertEquals( "schema1", joinTableAnno.schema() );
		//JoinColumns
		JoinColumn[] joinColumns = joinTableAnno.joinColumns();
		assertEquals( 2, joinColumns.length );
		assertEquals( "", joinColumns[0].name() );
		assertEquals( "", joinColumns[0].referencedColumnName() );
		assertEquals( "", joinColumns[0].table() );
		assertEquals( "", joinColumns[0].columnDefinition() );
		assertTrue( joinColumns[0].insertable() );
		assertTrue( joinColumns[0].updatable() );
		assertTrue( joinColumns[0].nullable() );
		assertFalse( joinColumns[0].unique() );
		assertEquals( "col1", joinColumns[1].name() );
		assertEquals( "col2", joinColumns[1].referencedColumnName() );
		assertEquals( "table2", joinColumns[1].table() );
		assertEquals( "int", joinColumns[1].columnDefinition() );
		assertFalse( joinColumns[1].insertable() );
		assertFalse( joinColumns[1].updatable() );
		assertFalse( joinColumns[1].nullable() );
		assertTrue( joinColumns[1].unique() );
		//InverseJoinColumns
		JoinColumn[] inverseJoinColumns = joinTableAnno.inverseJoinColumns();
		assertEquals( 2, inverseJoinColumns.length );
		assertEquals( "", inverseJoinColumns[0].name() );
		assertEquals( "", inverseJoinColumns[0].referencedColumnName() );
		assertEquals( "", inverseJoinColumns[0].table() );
		assertEquals( "", inverseJoinColumns[0].columnDefinition() );
		assertTrue( inverseJoinColumns[0].insertable() );
		assertTrue( inverseJoinColumns[0].updatable() );
		assertTrue( inverseJoinColumns[0].nullable() );
		assertFalse( inverseJoinColumns[0].unique() );
		assertEquals( "col3", inverseJoinColumns[1].name() );
		assertEquals( "col4", inverseJoinColumns[1].referencedColumnName() );
		assertEquals( "table3", inverseJoinColumns[1].table() );
		assertEquals( "int", inverseJoinColumns[1].columnDefinition() );
		assertFalse( inverseJoinColumns[1].insertable() );
		assertFalse( inverseJoinColumns[1].updatable() );
		assertFalse( inverseJoinColumns[1].nullable() );
		assertTrue( inverseJoinColumns[1].unique() );
		//UniqueConstraints
		UniqueConstraint[] uniqueConstraints = joinTableAnno
				.uniqueConstraints();
		assertEquals( 2, uniqueConstraints.length );
		assertEquals( "", uniqueConstraints[0].name() );
		assertEquals( 1, uniqueConstraints[0].columnNames().length );
		assertEquals( "col5", uniqueConstraints[0].columnNames()[0] );
		assertEquals( "uq1", uniqueConstraints[1].name() );
		assertEquals( 2, uniqueConstraints[1].columnNames().length );
		assertEquals( "col6", uniqueConstraints[1].columnNames()[0] );
		assertEquals( "col7", uniqueConstraints[1].columnNames()[1] );
		//Second, an association using join columns
		assertEquals( "association2", overrides[1].name() );
		//JoinColumns
		joinColumns = overrides[1].joinColumns();
		assertEquals( 2, joinColumns.length );
		assertEquals( "", joinColumns[0].name() );
		assertEquals( "", joinColumns[0].referencedColumnName() );
		assertEquals( "", joinColumns[0].table() );
		assertEquals( "", joinColumns[0].columnDefinition() );
		assertTrue( joinColumns[0].insertable() );
		assertTrue( joinColumns[0].updatable() );
		assertTrue( joinColumns[0].nullable() );
		assertFalse( joinColumns[0].unique() );
		assertEquals( "col8", joinColumns[1].name() );
		assertEquals( "col9", joinColumns[1].referencedColumnName() );
		assertEquals( "table4", joinColumns[1].table() );
		assertEquals( "int", joinColumns[1].columnDefinition() );
		assertFalse( joinColumns[1].insertable() );
		assertFalse( joinColumns[1].updatable() );
		assertFalse( joinColumns[1].nullable() );
		assertTrue( joinColumns[1].unique() );
	}

	public void testCollectionTableNoChildren() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm26.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( CollectionTable.class );
		CollectionTable tableAnno = reader.getAnnotation( CollectionTable.class );
		assertEquals( "", tableAnno.name() );
		assertEquals( "", tableAnno.catalog() );
		assertEquals( "", tableAnno.schema() );
		assertEquals( 0, tableAnno.joinColumns().length );
		assertEquals( 0, tableAnno.uniqueConstraints().length );
	}

	public void testCollectionTableAllChildren() throws Exception {
		reader = getReader( Entity3.class, "field1", "element-collection.orm27.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationPresent( CollectionTable.class );
		CollectionTable tableAnno = reader.getAnnotation( CollectionTable.class );
		assertEquals( "table1", tableAnno.name() );
		assertEquals( "catalog1", tableAnno.catalog() );
		assertEquals( "schema1", tableAnno.schema() );
		//JoinColumns
		JoinColumn[] joinColumns = tableAnno.joinColumns();
		assertEquals( 2, joinColumns.length );
		assertEquals( "", joinColumns[0].name() );
		assertEquals( "", joinColumns[0].referencedColumnName() );
		assertEquals( "", joinColumns[0].table() );
		assertEquals( "", joinColumns[0].columnDefinition() );
		assertTrue( joinColumns[0].insertable() );
		assertTrue( joinColumns[0].updatable() );
		assertTrue( joinColumns[0].nullable() );
		assertFalse( joinColumns[0].unique() );
		assertEquals( "col1", joinColumns[1].name() );
		assertEquals( "col2", joinColumns[1].referencedColumnName() );
		assertEquals( "table2", joinColumns[1].table() );
		assertEquals( "int", joinColumns[1].columnDefinition() );
		assertFalse( joinColumns[1].insertable() );
		assertFalse( joinColumns[1].updatable() );
		assertFalse( joinColumns[1].nullable() );
		assertTrue( joinColumns[1].unique() );
		//UniqueConstraints
		UniqueConstraint[] uniqueConstraints = tableAnno.uniqueConstraints();
		assertEquals( 2, uniqueConstraints.length );
		assertEquals( "", uniqueConstraints[0].name() );
		assertEquals( 1, uniqueConstraints[0].columnNames().length );
		assertEquals( "col3", uniqueConstraints[0].columnNames()[0] );
		assertEquals( "uq1", uniqueConstraints[1].name() );
		assertEquals( 2, uniqueConstraints[1].columnNames().length );
		assertEquals( "col4", uniqueConstraints[1].columnNames()[0] );
		assertEquals( "col5", uniqueConstraints[1].columnNames()[1] );
	}

	// Fully-populated <element-collection>: fetch/target-class/access come
	// through on @ElementCollection and @Access.
	public void testAllAttributes() throws Exception {
		reader = getReader( Entity2.class, "field1", "element-collection.orm28.xml" );
		assertAnnotationPresent( ElementCollection.class );
		assertAnnotationNotPresent( OrderBy.class );
		assertAnnotationNotPresent( OrderColumn.class );
		assertAnnotationNotPresent( MapKey.class );
		assertAnnotationNotPresent( MapKeyClass.class );
		assertAnnotationNotPresent( MapKeyTemporal.class );
		assertAnnotationNotPresent( MapKeyEnumerated.class );
		assertAnnotationNotPresent( MapKeyColumn.class );
		assertAnnotationNotPresent( MapKeyJoinColumns.class );
		assertAnnotationNotPresent( MapKeyJoinColumn.class );
		assertAnnotationNotPresent( Column.class );
		assertAnnotationNotPresent( Temporal.class );
		assertAnnotationNotPresent( Enumerated.class );
		assertAnnotationNotPresent( Lob.class );
		assertAnnotationNotPresent( AttributeOverride.class );
		assertAnnotationNotPresent( AttributeOverrides.class );
		assertAnnotationNotPresent( AssociationOverride.class );
		assertAnnotationNotPresent( AssociationOverrides.class );
		assertAnnotationNotPresent( CollectionTable.class );
		assertAnnotationPresent( Access.class );
		ElementCollection relAnno = reader.getAnnotation( ElementCollection.class );
		assertEquals( FetchType.EAGER, relAnno.fetch() );
		assertEquals( Entity3.class, relAnno.targetClass() );
		assertEquals( AccessType.PROPERTY, reader.getAnnotation( Access.class )
				.value() );
	}
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.documentation.docstrings; import com.google.common.collect.Lists; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiReference; import com.intellij.psi.PsiReferenceBase; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.jetbrains.python.PyNames; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.ParamHelper; import com.jetbrains.python.psi.types.TypeEvalContext; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; /** * @author yole */ public class DocStringParameterReference extends PsiReferenceBase<PyStringLiteralExpression> implements PsiReferenceEx { private final ReferenceType myType; public DocStringParameterReference(PyStringLiteralExpression element, TextRange range, ReferenceType refType) { super(element, range); myType = refType; } public enum ReferenceType {PARAMETER, PARAMETER_TYPE, KEYWORD, VARIABLE, CLASS_VARIABLE, INSTANCE_VARIABLE, GLOBAL_VARIABLE} @Override public PsiElement resolve() { PyDocStringOwner owner = PsiTreeUtil.getParentOfType(getElement(), 
PyDocStringOwner.class); if (owner instanceof PyFunction) { return resolveParameter((PyFunction)owner); } if (owner instanceof PyClass) { final PyFunction init = ((PyClass)owner).findMethodByName(PyNames.INIT, false, null); if (init != null) { PsiElement element = resolveParameter(init); if (element == null && (myType.equals(ReferenceType.CLASS_VARIABLE) || myType.equals(ReferenceType.PARAMETER_TYPE))) { element = resolveClassVariable((PyClass)owner); } if (element == null && (myType.equals(ReferenceType.INSTANCE_VARIABLE) || myType.equals(ReferenceType.PARAMETER_TYPE))) { element = resolveInstanceVariable((PyClass)owner); } return element; } else { PsiElement element = null; if (myType.equals(ReferenceType.CLASS_VARIABLE) || myType.equals(ReferenceType.PARAMETER_TYPE)) { element = resolveClassVariable((PyClass)owner); } if (element == null && (myType.equals(ReferenceType.INSTANCE_VARIABLE) || myType.equals(ReferenceType.PARAMETER_TYPE))) { element = resolveInstanceVariable((PyClass)owner); } return element; } } if (owner instanceof PyFile && myType == ReferenceType.GLOBAL_VARIABLE) { return resolveGlobalVariable(((PyFile)owner)); } return null; } @Nullable private PsiElement resolveGlobalVariable(@NotNull PyFile owner) { for (PyTargetExpression assignment : owner.getTopLevelAttributes()) { if (getCanonicalText().equals(assignment.getName())) { return assignment; } } return null; } @Nullable private PsiElement resolveInstanceVariable(final PyClass owner) { final List<PyTargetExpression> attributes = owner.getInstanceAttributes(); for (PyTargetExpression element : attributes) { if (getCanonicalText().equals(element.getName())) { return element; } } return null; } @Nullable private PsiElement resolveClassVariable(@NotNull final PyClass owner) { final List<PyTargetExpression> attributes = owner.getClassAttributes(); for (PyTargetExpression element : attributes) { if (getCanonicalText().equals(element.getName())) { return element; } } return null; } @Nullable private 
PsiElement resolveParameter(PyFunction owner) { final PyParameterList parameterList = owner.getParameterList(); final PyNamedParameter resolved = parameterList.findParameterByName(getCanonicalText()); if (resolved != null) { return resolved; } for (PyParameter parameter : parameterList.getParameters()) { if (parameter instanceof PyNamedParameter) { final PyNamedParameter namedParameter = (PyNamedParameter)parameter; if (namedParameter.isKeywordContainer() || namedParameter.isPositionalContainer()) { return namedParameter; } } } return null; } @NotNull @Override public Object[] getVariants() { // see PyDocstringCompletionContributor return ArrayUtil.EMPTY_OBJECT_ARRAY; } @NotNull public List<PyNamedParameter> collectParameterVariants() { PyDocStringOwner owner = PsiTreeUtil.getParentOfType(getElement(), PyDocStringOwner.class); if (owner instanceof PyFunction) { List<PyNamedParameter> result = Lists.newArrayList(); final List<PyNamedParameter> namedParameters = ParamHelper.collectNamedParameters(((PyFunction)owner).getParameterList()); Set<String> usedParameters = new HashSet<>(); PyStringLiteralExpression expression = PsiTreeUtil.getParentOfType(getElement(), PyStringLiteralExpression.class, false); if (expression != null) { PsiReference[] references = expression.getReferences(); for (PsiReference ref : references) { if (ref instanceof DocStringParameterReference && ((DocStringParameterReference)ref).getType().equals(myType)) { usedParameters.add(ref.getCanonicalText()); } } } for (PyNamedParameter param : namedParameters) { if (!usedParameters.contains(param.getName())) { result.add(param); } } return result; } return Collections.emptyList(); } public ReferenceType getType() { return myType; } @Nullable @Override public HighlightSeverity getUnresolvedHighlightSeverity(TypeEvalContext context) { return HighlightSeverity.WEAK_WARNING; } @Nullable @Override public String getUnresolvedDescription() { PyDocStringOwner owner = PsiTreeUtil.getParentOfType(getElement(), 
PyDocStringOwner.class); if (owner instanceof PyFunction) { PyFunction function = (PyFunction)owner; return "Function '" + function.getName() + "' does not have a parameter '" + getCanonicalText() + "'"; } return null; } @Override public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException { TextRange range = getRangeInElement(); Pair<String, String> quotes = PyStringLiteralUtil.getQuotes(range.substring(myElement.getText())); if (quotes != null) { range = TextRange.create(range.getStartOffset() + quotes.first.length(), range.getEndOffset() - quotes.second.length()); } String newName = range.replace(myElement.getText(), newElementName); myElement.replace(PyElementGenerator.getInstance(myElement.getProject()).createStringLiteralAlreadyEscaped(newName)); return myElement; } }
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.

package jodd.util;

import jodd.JoddCore;

import java.io.ByteArrayOutputStream;
import java.io.UnsupportedEncodingException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static jodd.util.CharUtil.isAlpha;
import static jodd.util.CharUtil.isDigit;
import static jodd.util.CharUtil.isPchar;
import static jodd.util.CharUtil.isSubDelimiter;
import static jodd.util.CharUtil.isUnreserved;

/**
 * Encodes URLs correctly, significantly faster and more convenient.
 * <p>
 * Here is an example of full URL:
 * {@literal https://jodd:ddoj@www.jodd.org:8080/file;p=1?q=2#third}.
 * It consist of:
 * <ul>
 * <li>scheme (https)</li>
 * <li>user (jodd)</li>
 * <li>password (ddoj)</li>
 * <li>host (www.jodd.org)</li>
 * <li>port (8080)</li>
 * <li>path (file)</li>
 * <li>path parameter (p=1)</li>
 * <li>query parameter (q=2)</li>
 * <li>fragment (third)</li>
 * </ul>
 * Each URL part has its own encoding rules. The <b>only</b> correct way of
 * encoding URLs is to encode each part separately, and then to concatenate
 * results. For easier query building you can use {@link #build(String) builder}.
 * It provides fluent interface for defining query parameters.
 */
public class URLCoder {

	// Sub-patterns, each capturing one URI component (see RFC 3986, appendix B).
	private static final String SCHEME_PATTERN = "([^:/?#]+):";

	private static final String HTTP_PATTERN = "(http|https):";

	private static final String USERINFO_PATTERN = "([^@/]*)";

	private static final String HOST_PATTERN = "([^/?#:]*)";

	private static final String PORT_PATTERN = "(\\d*)";

	private static final String PATH_PATTERN = "([^?#]*)";

	private static final String QUERY_PATTERN = "([^#]*)";

	private static final String LAST_PATTERN = "(.*)";

	// Regex patterns that matches URIs. See RFC 3986, appendix B
	private static final Pattern URI_PATTERN = Pattern.compile(
		"^(" + SCHEME_PATTERN + ")?" + "(//(" + USERINFO_PATTERN + "@)?" + HOST_PATTERN +
			"(:" + PORT_PATTERN + ")?" + ")?" + PATH_PATTERN +
			"(\\?" + QUERY_PATTERN + ")?" + "(#" + LAST_PATTERN + ")?");

	// Like URI_PATTERN but restricted to http(s), and without a fragment group
	// (the whole tail after '?' is captured by LAST_PATTERN).
	private static final Pattern HTTP_URL_PATTERN = Pattern.compile(
		'^' + HTTP_PATTERN + "(//(" + USERINFO_PATTERN + "@)?" + HOST_PATTERN +
			"(:" + PORT_PATTERN + ")?" + ")?" + PATH_PATTERN + "(\\?" + LAST_PATTERN + ")?");

	/**
	 * Enumeration to identify the parts of a URI.
	 * <p>
	 * Contains methods to indicate whether a given character is valid in a specific URI component.
	 *
	 * @see <a href="http://www.ietf.org/rfc/rfc3986.txt">RFC 3986</a>
	 */
	enum URIPart {

		SCHEME {
			@Override
			public boolean isValid(char c) {
				return isAlpha(c) || isDigit(c) || c == '+' || c == '-' || c == '.';
			}
		},
		// AUTHORITY is intentionally not encoded as a whole; its sub-parts
		// (user info, host, port) have different rules and are encoded separately.
		// AUTHORITY {
		// 	@Override
		// 	public boolean isValid(char c) {
		// 		return isUnreserved(c) || isSubDelimiter(c) || c == ':' || c == '@';
		// 	}
		// },
		USER_INFO {
			@Override
			public boolean isValid(char c) {
				return isUnreserved(c) || isSubDelimiter(c) || c == ':';
			}
		},
		HOST {
			@Override
			public boolean isValid(char c) {
				return isUnreserved(c) || isSubDelimiter(c);
			}
		},
		PORT {
			@Override
			public boolean isValid(char c) {
				return isDigit(c);
			}
		},
		PATH {
			@Override
			public boolean isValid(char c) {
				return isPchar(c) || c == '/';
			}
		},
		PATH_SEGMENT {
			@Override
			public boolean isValid(char c) {
				return isPchar(c);
			}
		},
		QUERY {
			@Override
			public boolean isValid(char c) {
				return isPchar(c) || c == '/' || c == '?';
			}
		},
		QUERY_PARAM {
			@Override
			public boolean isValid(char c) {
				// '=', '+' and '&' are structural inside query parameters and must be escaped.
				if (c == '=' || c == '+' || c == '&') {
					return false;
				}
				return isPchar(c) || c == '/' || c == '?';
			}
		},
		FRAGMENT {
			@Override
			public boolean isValid(char c) {
				return isPchar(c) || c == '/' || c == '?';
			}
		};

		/**
		 * Indicates whether the given character is allowed in this URI component.
		 *
		 * @return <code>true</code> if the character is allowed; {@code false} otherwise
		 */
		public abstract boolean isValid(char c);
	}

	// ---------------------------------------------------------------- util methods

	/**
	 * Encodes single URI component.
	 *
	 * @param source   raw component value, may be {@code null}
	 * @param encoding charset name used to get the bytes of the source
	 * @param uriPart  URI part whose rules decide which characters stay literal
	 * @return encoded component; {@code null} when the source is {@code null} or
	 *         the encoding is unsupported (best-effort: this method never throws)
	 */
	private static String encodeUriComponent(String source, String encoding, URIPart uriPart) {
		if (source == null) {
			return null;
		}

		byte[] bytes;
		try {
			bytes = encodeBytes(source.getBytes(encoding), uriPart);
		} catch (UnsupportedEncodingException ignore) {
			return null;
		}

		// Encoded output is pure ASCII, so a 1:1 byte-to-char copy is safe.
		char[] chars = new char[bytes.length];
		for (int i = 0; i < bytes.length; i++) {
			chars[i] = (char) bytes[i];
		}
		return new String(chars);
	}

	/**
	 * Encodes byte array using allowed characters from {@link URIPart}.
	 * Bytes that are valid for the given part are copied through; everything
	 * else (including all bytes &gt;= 0x80) is percent-encoded as {@code %XX}
	 * with upper-case hex digits.
	 */
	private static byte[] encodeBytes(byte[] source, URIPart uriPart) {
		ByteArrayOutputStream bos = new ByteArrayOutputStream(source.length);
		for (byte b : source) {
			// Widen to the unsigned value explicitly. (The previous code did
			// `b += 256` on the byte, which narrows right back and is a no-op;
			// it only worked because negative bytes fail every isValid() check.)
			int ub = b & 0xFF;
			if (uriPart.isValid((char) ub)) {
				bos.write(ub);
			}
			else {
				bos.write('%');
				char hex1 = Character.toUpperCase(Character.forDigit((ub >> 4) & 0xF, 16));
				char hex2 = Character.toUpperCase(Character.forDigit(ub & 0xF, 16));
				bos.write(hex1);
				bos.write(hex2);
			}
		}
		return bos.toByteArray();
	}

	// ---------------------------------------------------------------- main methods

	/**
	 * Encodes the given URI scheme with the given encoding.
	 */
	public static String encodeScheme(String scheme, String encoding) {
		return encodeUriComponent(scheme, encoding, URIPart.SCHEME);
	}
	public static String encodeScheme(String scheme) {
		return encodeUriComponent(scheme, JoddCore.encoding, URIPart.SCHEME);
	}

/*
	/**
	 * Encodes the given URI authority with the given encoding.
	 *
	public static String encodeAuthority(String authority, String encoding) {
		return encodeUriComponent(authority, encoding, URIPart.AUTHORITY);
	}
	public static String encodeAuthority(String authority) {
		return encodeUriComponent(authority, JoddCore.encoding, URIPart.AUTHORITY);
	}
*/

	/**
	 * Encodes the given URI user info with the given encoding.
	 */
	public static String encodeUserInfo(String userInfo, String encoding) {
		return encodeUriComponent(userInfo, encoding, URIPart.USER_INFO);
	}
	public static String encodeUserInfo(String userInfo) {
		return encodeUriComponent(userInfo, JoddCore.encoding, URIPart.USER_INFO);
	}

	/**
	 * Encodes the given URI host with the given encoding.
	 */
	public static String encodeHost(String host, String encoding) {
		return encodeUriComponent(host, encoding, URIPart.HOST);
	}
	public static String encodeHost(String host) {
		return encodeUriComponent(host, JoddCore.encoding, URIPart.HOST);
	}

	/**
	 * Encodes the given URI port with the given encoding.
	 */
	public static String encodePort(String port, String encoding) {
		return encodeUriComponent(port, encoding, URIPart.PORT);
	}
	public static String encodePort(String port) {
		return encodeUriComponent(port, JoddCore.encoding, URIPart.PORT);
	}

	/**
	 * Encodes the given URI path with the given encoding.
	 */
	public static String encodePath(String path, String encoding) {
		return encodeUriComponent(path, encoding, URIPart.PATH);
	}
	public static String encodePath(String path) {
		return encodeUriComponent(path, JoddCore.encoding, URIPart.PATH);
	}

	/**
	 * Encodes the given URI path segment with the given encoding.
	 */
	public static String encodePathSegment(String segment, String encoding) {
		return encodeUriComponent(segment, encoding, URIPart.PATH_SEGMENT);
	}
	public static String encodePathSegment(String segment) {
		return encodeUriComponent(segment, JoddCore.encoding, URIPart.PATH_SEGMENT);
	}

	/**
	 * Encodes the given URI query with the given encoding.
	 */
	public static String encodeQuery(String query, String encoding) {
		return encodeUriComponent(query, encoding, URIPart.QUERY);
	}
	public static String encodeQuery(String query) {
		return encodeUriComponent(query, JoddCore.encoding, URIPart.QUERY);
	}

	/**
	 * Encodes the given URI query parameter with the given encoding.
	 */
	public static String encodeQueryParam(String queryParam, String encoding) {
		return encodeUriComponent(queryParam, encoding, URIPart.QUERY_PARAM);
	}
	public static String encodeQueryParam(String queryParam) {
		return encodeUriComponent(queryParam, JoddCore.encoding, URIPart.QUERY_PARAM);
	}

	/**
	 * Encodes the given URI fragment with the given encoding.
	 */
	public static String encodeFragment(String fragment, String encoding) {
		return encodeUriComponent(fragment, encoding, URIPart.FRAGMENT);
	}
	public static String encodeFragment(String fragment) {
		return encodeUriComponent(fragment, JoddCore.encoding, URIPart.FRAGMENT);
	}


	// ---------------------------------------------------------------- url

	/**
	 * @see #encodeUri(String, String)
	 */
	public static String encodeUri(String uri) {
		return encodeUri(uri, JoddCore.encoding);
	}
	/**
	 * Encodes the given source URI into an encoded String. All various URI components are
	 * encoded according to their respective valid character sets.
	 * <p>This method does <b>not</b> attempt to encode "=" and "&"
	 * characters in query parameter names and query parameter values because they cannot
	 * be parsed in a reliable way.
	 *
	 * @throws IllegalArgumentException if the input does not match the URI grammar
	 */
	public static String encodeUri(String uri, String encoding) {
		Matcher m = URI_PATTERN.matcher(uri);
		if (m.matches()) {
			// Group numbers follow the nesting order of URI_PATTERN's capturing groups.
			String scheme = m.group(2);
			String authority = m.group(3);
			String userinfo = m.group(5);
			String host = m.group(6);
			String port = m.group(8);
			String path = m.group(9);
			String query = m.group(11);
			String fragment = m.group(13);

			return encodeUriComponents(scheme, authority, userinfo, host, port, path, query, fragment, encoding);
		}
		throw new IllegalArgumentException("Invalid URI: " + uri);
	}

	/**
	 * @see #encodeHttpUrl(String, String)
	 */
	public static String encodeHttpUrl(String httpUrl) {
		return encodeHttpUrl(httpUrl, JoddCore.encoding);
	}
	/**
	 * Encodes the given HTTP URI into an encoded String. All various URI components are
	 * encoded according to their respective valid character sets.
	 * <p>This method does <b>not</b> support fragments ({@code #}),
	 * as these are not supposed to be sent to the server, but retained by the client.
	 * <p>This method does <b>not</b> attempt to encode "=" and "&"
	 * characters in query parameter names and query parameter values because they cannot
	 * be parsed in a reliable way.
	 *
	 * @throws IllegalArgumentException if the input does not match the http(s) URL grammar
	 */
	public static String encodeHttpUrl(String httpUrl, String encoding) {
		Matcher m = HTTP_URL_PATTERN.matcher(httpUrl);
		if (m.matches()) {
			// Group numbers follow the nesting order of HTTP_URL_PATTERN's capturing groups.
			String scheme = m.group(1);
			String authority = m.group(2);
			String userinfo = m.group(4);
			String host = m.group(5);
			String portString = m.group(7);
			String path = m.group(8);
			String query = m.group(10);

			return encodeUriComponents(scheme, authority, userinfo, host, portString, path, query, null, encoding);
		}
		throw new IllegalArgumentException("Invalid HTTP URL: " + httpUrl);
	}

	/**
	 * Encodes each non-null component with its part-specific rules and
	 * concatenates them back with the structural delimiters.
	 */
	private static String encodeUriComponents(
			String scheme, String authority, String userInfo,
			String host, String port, String path, String query,
			String fragment, String encoding) {

		StringBuilder sb = new StringBuilder();

		if (scheme != null) {
			sb.append(encodeScheme(scheme, encoding));
			sb.append(':');
		}

		if (authority != null) {
			sb.append("//");
			if (userInfo != null) {
				sb.append(encodeUserInfo(userInfo, encoding));
				sb.append('@');
			}
			if (host != null) {
				sb.append(encodeHost(host, encoding));
			}
			if (port != null) {
				sb.append(':');
				sb.append(encodePort(port, encoding));
			}
		}

		sb.append(encodePath(path, encoding));

		if (query != null) {
			sb.append('?');
			sb.append(encodeQuery(query, encoding));
		}

		if (fragment != null) {
			sb.append('#');
			sb.append(encodeFragment(fragment, encoding));
		}

		return sb.toString();
	}

	// ---------------------------------------------------------------- builder

	/**
	 * Creates URL builder for user-friendly way of building URLs.
	 * Provided path is parsed and {@link #encodeUri(String) encoded}.
	 *
	 * @see #build(String, boolean)
	 */
	public static Builder build(String path) {
		return build(path, true);
	}

	/**
	 * Creates URL builder with given path that can be optionally encoded.
	 * Since most of the time path is valid and does not require to be encoded,
	 * use this method to gain some performance. When encoding flag is turned off,
	 * provided path is used without processing.
	 * <p>
	 * The purpose of builder is to help with query parameters. All other URI parts
	 * should be set previously or after the URL is built.
	 */
	public static Builder build(String path, boolean encodePath) {
		return new Builder(path, encodePath, JoddCore.encoding);
	}

	public static class Builder {

		protected final StringBuilder url;
		protected final String encoding;
		// Tracks whether the URL already carries a '?', so queryParam() knows
		// whether to start the query string or append to it.
		protected boolean hasParams;

		public Builder(String path, boolean encodePath, String encoding) {
			this.encoding = encoding;
			url = new StringBuilder();
			if (encodePath) {
				url.append(encodeUri(path, encoding));
			} else {
				url.append(path);
			}
			this.hasParams = url.indexOf(StringPool.QUESTION_MARK) != -1;
		}

		/**
		 * Appends new query parameter to the url.
		 * A {@code null} or empty value appends the name only (no '=').
		 */
		public Builder queryParam(String name, String value) {
			url.append(hasParams ? '&' : '?');
			hasParams = true;

			url.append(encodeQueryParam(name, encoding));

			if ((value != null) && (value.length() > 0)) {
				url.append('=');
				url.append(encodeQueryParam(value, encoding));
			}
			return this;
		}

		/**
		 * Returns full URL.
		 */
		@Override
		public String toString() {
			return url.toString();
		}
	}

}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.retail.v2.stub; import com.google.api.HttpBody; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.retail.v2.CollectUserEventRequest; import com.google.cloud.retail.v2.ImportMetadata; import com.google.cloud.retail.v2.ImportUserEventsRequest; import com.google.cloud.retail.v2.ImportUserEventsResponse; import com.google.cloud.retail.v2.PurgeMetadata; import com.google.cloud.retail.v2.PurgeUserEventsRequest; import 
com.google.cloud.retail.v2.PurgeUserEventsResponse; import com.google.cloud.retail.v2.RejoinUserEventsMetadata; import com.google.cloud.retail.v2.RejoinUserEventsRequest; import com.google.cloud.retail.v2.RejoinUserEventsResponse; import com.google.cloud.retail.v2.UserEvent; import com.google.cloud.retail.v2.WriteUserEventRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link UserEventServiceStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (retail.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
 *
 * <p>For example, to set the total timeout of writeUserEvent to 30 seconds:
 *
 * <pre>{@code
 * UserEventServiceStubSettings.Builder userEventServiceSettingsBuilder =
 *     UserEventServiceStubSettings.newBuilder();
 * userEventServiceSettingsBuilder
 *     .writeUserEventSettings()
 *     .setRetrySettings(
 *         userEventServiceSettingsBuilder
 *             .writeUserEventSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * UserEventServiceStubSettings userEventServiceSettings = userEventServiceSettingsBuilder.build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class UserEventServiceStubSettings extends StubSettings<UserEventServiceStubSettings> {
  // NOTE(review): this file is auto-generated (see @Generated above); manual
  // code changes will be lost on regeneration — only regenerate, never hand-edit.

  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // One immutable settings object per RPC; long-running RPCs additionally get
  // OperationCallSettings describing how their operations are polled.
  private final UnaryCallSettings<WriteUserEventRequest, UserEvent> writeUserEventSettings;
  private final UnaryCallSettings<CollectUserEventRequest, HttpBody> collectUserEventSettings;
  private final UnaryCallSettings<PurgeUserEventsRequest, Operation> purgeUserEventsSettings;
  private final OperationCallSettings<
          PurgeUserEventsRequest, PurgeUserEventsResponse, PurgeMetadata>
      purgeUserEventsOperationSettings;
  private final UnaryCallSettings<ImportUserEventsRequest, Operation> importUserEventsSettings;
  private final OperationCallSettings<
          ImportUserEventsRequest, ImportUserEventsResponse, ImportMetadata>
      importUserEventsOperationSettings;
  private final UnaryCallSettings<RejoinUserEventsRequest, Operation> rejoinUserEventsSettings;
  private final OperationCallSettings<
          RejoinUserEventsRequest, RejoinUserEventsResponse, RejoinUserEventsMetadata>
      rejoinUserEventsOperationSettings;

  /** Returns the object with the settings used for calls to writeUserEvent. */
  public UnaryCallSettings<WriteUserEventRequest, UserEvent> writeUserEventSettings() {
    return writeUserEventSettings;
  }

  /** Returns the object with the settings used for calls to collectUserEvent. */
  public UnaryCallSettings<CollectUserEventRequest, HttpBody> collectUserEventSettings() {
    return collectUserEventSettings;
  }

  /** Returns the object with the settings used for calls to purgeUserEvents. */
  public UnaryCallSettings<PurgeUserEventsRequest, Operation> purgeUserEventsSettings() {
    return purgeUserEventsSettings;
  }

  /** Returns the object with the settings used for calls to purgeUserEvents. */
  public OperationCallSettings<PurgeUserEventsRequest, PurgeUserEventsResponse, PurgeMetadata>
      purgeUserEventsOperationSettings() {
    return purgeUserEventsOperationSettings;
  }

  /** Returns the object with the settings used for calls to importUserEvents. */
  public UnaryCallSettings<ImportUserEventsRequest, Operation> importUserEventsSettings() {
    return importUserEventsSettings;
  }

  /** Returns the object with the settings used for calls to importUserEvents. */
  public OperationCallSettings<ImportUserEventsRequest, ImportUserEventsResponse, ImportMetadata>
      importUserEventsOperationSettings() {
    return importUserEventsOperationSettings;
  }

  /** Returns the object with the settings used for calls to rejoinUserEvents. */
  public UnaryCallSettings<RejoinUserEventsRequest, Operation> rejoinUserEventsSettings() {
    return rejoinUserEventsSettings;
  }

  /** Returns the object with the settings used for calls to rejoinUserEvents. */
  public OperationCallSettings<
          RejoinUserEventsRequest, RejoinUserEventsResponse, RejoinUserEventsMetadata>
      rejoinUserEventsOperationSettings() {
    return rejoinUserEventsOperationSettings;
  }

  /** Creates the transport-specific stub; only the gRPC transport is supported here. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public UserEventServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcUserEventServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "retail.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "retail.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(UserEventServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots the builder state into immutable per-RPC settings objects.
  protected UserEventServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    writeUserEventSettings = settingsBuilder.writeUserEventSettings().build();
    collectUserEventSettings = settingsBuilder.collectUserEventSettings().build();
    purgeUserEventsSettings = settingsBuilder.purgeUserEventsSettings().build();
    purgeUserEventsOperationSettings = settingsBuilder.purgeUserEventsOperationSettings().build();
    importUserEventsSettings = settingsBuilder.importUserEventsSettings().build();
    importUserEventsOperationSettings = settingsBuilder.importUserEventsOperationSettings().build();
    rejoinUserEventsSettings = settingsBuilder.rejoinUserEventsSettings().build();
    rejoinUserEventsOperationSettings = settingsBuilder.rejoinUserEventsOperationSettings().build();
  }

  /** Builder for UserEventServiceStubSettings.
   */
  public static class Builder extends StubSettings.Builder<UserEventServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<WriteUserEventRequest, UserEvent>
        writeUserEventSettings;
    private final UnaryCallSettings.Builder<CollectUserEventRequest, HttpBody>
        collectUserEventSettings;
    private final UnaryCallSettings.Builder<PurgeUserEventsRequest, Operation>
        purgeUserEventsSettings;
    private final OperationCallSettings.Builder<
            PurgeUserEventsRequest, PurgeUserEventsResponse, PurgeMetadata>
        purgeUserEventsOperationSettings;
    private final UnaryCallSettings.Builder<ImportUserEventsRequest, Operation>
        importUserEventsSettings;
    private final OperationCallSettings.Builder<
            ImportUserEventsRequest, ImportUserEventsResponse, ImportMetadata>
        importUserEventsOperationSettings;
    private final UnaryCallSettings.Builder<RejoinUserEventsRequest, Operation>
        rejoinUserEventsSettings;
    private final OperationCallSettings.Builder<
            RejoinUserEventsRequest, RejoinUserEventsResponse, RejoinUserEventsMetadata>
        rejoinUserEventsOperationSettings;

    // Retryable status codes keyed by policy name; looked up in initDefaults().
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      definitions.put(
          "retry_policy_1_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      definitions.put(
          "retry_policy_3_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry timing parameters keyed by policy name; looked up in initDefaults().
    // policy_0: 5s RPC/total timeout; policy_1: 30s; policy_3: 10min RPC timeout.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(5000L))
              .setInitialRpcTimeout(Duration.ofMillis(5000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(5000L))
              .setTotalTimeout(Duration.ofMillis(5000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(30000L))
              .setInitialRpcTimeout(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(30000L))
              .setTotalTimeout(Duration.ofMillis(30000L))
              .build();
      definitions.put("retry_policy_1_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(300000L))
              .setInitialRpcTimeout(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(600000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_3_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      writeUserEventSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      collectUserEventSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      purgeUserEventsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      purgeUserEventsOperationSettings = OperationCallSettings.newBuilder();
      importUserEventsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importUserEventsOperationSettings = OperationCallSettings.newBuilder();
      rejoinUserEventsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      rejoinUserEventsOperationSettings = OperationCallSettings.newBuilder();

      // Only the unary (non-operation) builders participate in applyToAllUnaryMethods().
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              writeUserEventSettings,
              collectUserEventSettings,
              purgeUserEventsSettings,
              importUserEventsSettings,
              rejoinUserEventsSettings);
      initDefaults(this);
    }

    // Copy constructor used by toBuilder().
    protected Builder(UserEventServiceStubSettings settings) {
      super(settings);

      writeUserEventSettings = settings.writeUserEventSettings.toBuilder();
      collectUserEventSettings = settings.collectUserEventSettings.toBuilder();
      purgeUserEventsSettings = settings.purgeUserEventsSettings.toBuilder();
      purgeUserEventsOperationSettings = settings.purgeUserEventsOperationSettings.toBuilder();
      importUserEventsSettings = settings.importUserEventsSettings.toBuilder();
      importUserEventsOperationSettings = settings.importUserEventsOperationSettings.toBuilder();
      rejoinUserEventsSettings = settings.rejoinUserEventsSettings.toBuilder();
      rejoinUserEventsOperationSettings = settings.rejoinUserEventsOperationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              writeUserEventSettings,
              collectUserEventSettings,
              purgeUserEventsSettings,
              importUserEventsSettings,
              rejoinUserEventsSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Wires the retry-policy tables above into each RPC's settings, and
    // configures LRO polling (5s initial delay, x1.5 up to 45s, 5min total).
    private static Builder initDefaults(Builder builder) {
      builder
          .writeUserEventSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .collectUserEventSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .purgeUserEventsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .importUserEventsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));

      builder
          .rejoinUserEventsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .purgeUserEventsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<PurgeUserEventsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(PurgeUserEventsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(PurgeMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .importUserEventsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ImportUserEventsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ImportUserEventsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ImportMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .rejoinUserEventsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<RejoinUserEventsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(RejoinUserEventsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(RejoinUserEventsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    // Delegates to the base builder, which walks every registered unary method builder.
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the immutable list of per-method unary call settings builders. */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to writeUserEvent. */
  public UnaryCallSettings.Builder<WriteUserEventRequest, UserEvent> writeUserEventSettings() {
    return writeUserEventSettings;
  }

  /** Returns the builder for the settings used for calls to collectUserEvent. */
  public UnaryCallSettings.Builder<CollectUserEventRequest, HttpBody> collectUserEventSettings() {
    return collectUserEventSettings;
  }

  /** Returns the builder for the settings used for calls to purgeUserEvents. */
  public UnaryCallSettings.Builder<PurgeUserEventsRequest, Operation> purgeUserEventsSettings() {
    return purgeUserEventsSettings;
  }

  /**
   * Returns the builder for the long-running-operation settings used for calls to
   * purgeUserEvents.
   */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          PurgeUserEventsRequest, PurgeUserEventsResponse, PurgeMetadata>
      purgeUserEventsOperationSettings() {
    return purgeUserEventsOperationSettings;
  }

  /** Returns the builder for the settings used for calls to importUserEvents. */
  public UnaryCallSettings.Builder<ImportUserEventsRequest, Operation> importUserEventsSettings() {
    return importUserEventsSettings;
  }

  /**
   * Returns the builder for the long-running-operation settings used for calls to
   * importUserEvents.
   */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          ImportUserEventsRequest, ImportUserEventsResponse, ImportMetadata>
      importUserEventsOperationSettings() {
    return importUserEventsOperationSettings;
  }

  /** Returns the builder for the settings used for calls to rejoinUserEvents. */
  public UnaryCallSettings.Builder<RejoinUserEventsRequest, Operation> rejoinUserEventsSettings() {
    return rejoinUserEventsSettings;
  }

  /**
   * Returns the builder for the long-running-operation settings used for calls to
   * rejoinUserEvents.
   */
  @BetaApi(
      "The surface for use by generated code is not stable yet and may change in the future.")
  public OperationCallSettings.Builder<
          RejoinUserEventsRequest, RejoinUserEventsResponse, RejoinUserEventsMetadata>
      rejoinUserEventsOperationSettings() {
    return rejoinUserEventsOperationSettings;
  }

  /** Freezes the current builder state into an immutable settings object. */
  @Override
  public UserEventServiceStubSettings build() throws IOException {
    return new UserEventServiceStubSettings(this);
  }
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.pinot.query; import com.facebook.presto.common.block.SortOrder; import com.facebook.presto.pinot.PinotConfig; import com.facebook.presto.pinot.PinotTableHandle; import com.facebook.presto.pinot.TestPinotQueryBase; import com.facebook.presto.spi.plan.AggregationNode; import com.facebook.presto.spi.plan.DistinctLimitNode; import com.facebook.presto.spi.plan.Ordering; import com.facebook.presto.spi.plan.OrderingScheme; import com.facebook.presto.spi.plan.PlanNode; import com.facebook.presto.spi.plan.ProjectNode; import com.facebook.presto.spi.plan.TableScanNode; import com.facebook.presto.spi.plan.TopNNode; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.facebook.presto.sql.planner.iterative.rule.test.PlanBuilder; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.testng.annotations.Test; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.stream.Collectors; import static com.facebook.presto.common.type.BigintType.BIGINT; import static com.facebook.presto.common.type.DoubleType.DOUBLE; import static com.facebook.presto.common.type.TimestampType.TIMESTAMP; import 
static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Verifies that Presto plan fragments (scans, filters, aggregations, limits, topN,
 * distinct-limit) are translated by PinotQueryGenerator into the expected Pinot query
 * strings. Subclasses can override the protected hooks (useSqlSyntax, getGroupByLimitKey,
 * getExpectedAggOutput, getExpectedDistinctOutput) to re-run the same cases against a
 * different query syntax.
 */
public class TestPinotQueryGenerator
        extends TestPinotQueryBase
{
    // Every case in this class runs against the realtime-only test table from TestPinotQueryBase.
    protected static final PinotTableHandle pinotTable = realtimeOnlyTable;
    protected SessionHolder defaultSessionHolder = getDefaultSessionHolder();

    public SessionHolder getDefaultSessionHolder()
    {
        return new SessionHolder(false, useSqlSyntax());
    }

    // Hook: subclasses return true to exercise the SQL-syntax code path.
    public boolean useSqlSyntax()
    {
        return false;
    }

    // Builds a plan from the consumer, then generates and checks the Pinot query.
    private void testPinotQuery(
            PinotConfig givenPinotConfig,
            Function<PlanBuilder, PlanNode> planBuilderConsumer,
            String expectedPinotQuery,
            SessionHolder sessionHolder,
            Map<String, String> outputVariables)
    {
        PlanNode planNode = planBuilderConsumer.apply(createPlanBuilder(sessionHolder));
        testPinotQuery(givenPinotConfig, planNode, expectedPinotQuery, sessionHolder, outputVariables);
    }

    protected void testPinotQuery(
            PinotConfig givenPinotConfig,
            PlanNode planNode,
            String expectedPinotQuery,
            SessionHolder sessionHolder,
            Map<String, String> outputVariables)
    {
        testPinotQuery(givenPinotConfig, planNode, ImmutableList.of(expectedPinotQuery), sessionHolder, outputVariables);
    }

    /**
     * Core assertion: generates the Pinot query for {@code planNode} and asserts it matches one
     * of {@code expectedPinotQueries}. The {@code __expressions__} placeholder is expanded from
     * {@code outputVariables} in the plan's output-variable order (output order of aggregations
     * is not deterministic, hence a set of acceptable queries).
     * Note: {@code .get()} on the generator result throws NoSuchElementException when the plan
     * cannot be pushed down — several negative tests rely on exactly that.
     */
    protected void testPinotQuery(
            PinotConfig givenPinotConfig,
            PlanNode planNode,
            List<String> expectedPinotQueries,
            SessionHolder sessionHolder,
            Map<String, String> outputVariables)
    {
        PinotQueryGenerator.PinotQueryGeneratorResult pinotQueryGeneratorResult = new PinotQueryGenerator(givenPinotConfig, functionAndTypeManager, functionAndTypeManager, standardFunctionResolution).generate(planNode, sessionHolder.getConnectorSession()).get();
        String pinotQuery = pinotQueryGeneratorResult.getGeneratedPinotQuery().getQuery();
        Set<String> expectedPinotQuerySet = new HashSet<>();
        for (String expectedPinotQuery : expectedPinotQueries) {
            if (expectedPinotQuery.contains("__expressions__")) {
                // Expand the placeholder using the plan's actual output order; variables absent
                // from the map are dropped.
                String expressions = planNode.getOutputVariables().stream().map(v -> outputVariables.get(v.getName())).filter(v -> v != null).collect(Collectors.joining(", "));
                expectedPinotQuery = expectedPinotQuery.replace("__expressions__", expressions);
            }
            expectedPinotQuerySet.add(expectedPinotQuery);
        }
        if (expectedPinotQuerySet.size() == 1) {
            // Single candidate: use assertEquals for a better failure diff.
            assertEquals(pinotQuery, expectedPinotQuerySet.iterator().next());
        }
        assertTrue(expectedPinotQuerySet.contains(pinotQuery), String.format("Expected Generated PinotQuery: %s in the set: [%s]", pinotQuery, Arrays.toString(expectedPinotQuerySet.toArray(new String[0]))));
    }

    // Convenience overloads filling in the shared config/session/variable defaults.
    private void testPinotQuery(Function<PlanBuilder, PlanNode> planBuilderConsumer, String expectedPinotQuery, SessionHolder sessionHolder, Map<String, String> outputVariables)
    {
        testPinotQuery(pinotConfig, planBuilderConsumer, expectedPinotQuery, sessionHolder, outputVariables);
    }

    private void testPinotQuery(Function<PlanBuilder, PlanNode> planBuilderConsumer, String expectedPinotQuery, SessionHolder sessionHolder)
    {
        testPinotQuery(planBuilderConsumer, expectedPinotQuery, sessionHolder, ImmutableMap.of());
    }

    private void testPinotQuery(PinotConfig givenPinotConfig, Function<PlanBuilder, PlanNode> planBuilderConsumer, String expectedPinotQuery)
    {
        testPinotQuery(givenPinotConfig, planBuilderConsumer, expectedPinotQuery, defaultSessionHolder, ImmutableMap.of());
    }

    private void testPinotQuery(PinotConfig givenPinotConfig, PlanNode planNode, String expectedPinotQuery)
    {
        testPinotQuery(givenPinotConfig, planNode, expectedPinotQuery, defaultSessionHolder, ImmutableMap.of());
    }

    private void testPinotQuery(Function<PlanBuilder, PlanNode> planBuilderConsumer, String expectedPinotQuery)
    {
        testPinotQuery(planBuilderConsumer, expectedPinotQuery, defaultSessionHolder);
    }

    // Builds a plan node against a fresh plan builder using the default session.
    protected PlanNode buildPlan(Function<PlanBuilder, PlanNode> consumer)
    {
        PlanBuilder planBuilder = createPlanBuilder(defaultSessionHolder);
        return consumer.apply(planBuilder);
    }

    /**
     * Runs one aggregation function through six pushdown shapes: global aggregation,
     * filtered aggregation, single- and multi-column GROUP BY, and a GROUP BY over a
     * multi-value-column filter. {@code expectedAggOutput} is the Pinot-side rendering
     * of the aggregation.
     */
    private void testUnaryAggregationHelper(BiConsumer<PlanBuilder, PlanBuilder.AggregationBuilder> aggregationFunctionBuilder, String expectedAggOutput)
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode filter = buildPlan(planBuilder -> filter(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare), getRowExpression("fare > 3", defaultSessionHolder)));
        PlanNode anotherFilter = buildPlan(planBuilder -> filter(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare), getRowExpression("secondssinceepoch between 200 and 300 and regionid >= 40", defaultSessionHolder)));
        PlanNode filterWithMultiValue = buildPlan(planBuilder -> filter(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare, scores), getRowExpression("contains(scores, 100) OR contains(scores, 200)", defaultSessionHolder)));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(justScan).globalGrouping())),
                format("SELECT %s FROM realtimeOnly", getExpectedAggOutput(expectedAggOutput, "")));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(filter).globalGrouping())),
                format("SELECT %s FROM realtimeOnly WHERE (fare > 3)", getExpectedAggOutput(expectedAggOutput, "")));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(filter).singleGroupingSet(variable("regionid")))),
                format("SELECT %s FROM realtimeOnly WHERE (fare > 3) GROUP BY regionId %s 10000", getExpectedAggOutput(expectedAggOutput, "regionId"), getGroupByLimitKey()));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(justScan).singleGroupingSet(variable("regionid")))),
                format("SELECT %s FROM realtimeOnly GROUP BY regionId %s 10000", getExpectedAggOutput(expectedAggOutput, "regionId"), getGroupByLimitKey()));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(anotherFilter).singleGroupingSet(variable("regionid"), variable("city")))),
                format("SELECT %s FROM realtimeOnly WHERE ((secondsSinceEpoch BETWEEN 200 AND 300) AND (regionId >= 40)) GROUP BY regionId, city %s 10000", getExpectedAggOutput(expectedAggOutput, "regionId, city"), getGroupByLimitKey()));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggregationFunctionBuilder.accept(planBuilder, aggBuilder.source(filterWithMultiValue).singleGroupingSet(variable("regionid"), variable("city")))),
                format("SELECT %s FROM realtimeOnly WHERE ((scores = 100) OR (scores = 200)) GROUP BY regionId, city %s 10000", getExpectedAggOutput(expectedAggOutput, "regionId, city"), getGroupByLimitKey()));
    }

    // Hook: keyword between GROUP BY columns and the limit value ("TOP" here; SQL syntax differs).
    protected String getGroupByLimitKey()
    {
        return "TOP";
    }

    // Hook: how the aggregate output is rendered; base class ignores the group-by columns.
    protected String getExpectedAggOutput(String expectedAggOutput, String groupByColumns)
    {
        return expectedAggOutput;
    }

    // Plain column selection with LIMIT pushdown.
    @Test
    public void testSimpleSelectStar()
    {
        testPinotQuery(
                planBuilder -> limit(planBuilder, 50L, tableScan(planBuilder, pinotTable, regionId, city, fare, secondsSinceEpoch)),
                "SELECT regionId, city, fare, secondsSinceEpoch FROM realtimeOnly LIMIT 50");
        testPinotQuery(
                planBuilder -> limit(planBuilder, 50L, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch)),
                "SELECT regionId, secondsSinceEpoch FROM realtimeOnly LIMIT 50");
    }

    // Filter + projection + LIMIT pushdown in one query.
    @Test
    public void testSimpleSelectWithFilterLimit()
    {
        testPinotQuery(
                planBuilder -> limit(planBuilder, 50L, project(planBuilder, filter(planBuilder, tableScan(planBuilder, pinotTable, regionId, city, fare, secondsSinceEpoch), getRowExpression("secondssinceepoch > 20", defaultSessionHolder)), ImmutableList.of("city", "secondssinceepoch"))),
                "SELECT city, secondsSinceEpoch FROM realtimeOnly WHERE (secondsSinceEpoch > 20) LIMIT 50");
    }

    @Test
    public void testCountStar()
    {
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)), "count(*)");
    }

    // count(DISTINCT col) — planned as grouping + count — becomes Pinot DISTINCTCOUNT.
    @Test
    public void testDistinctCountPushdown()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode distinctAggregation = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("regionid"))));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(distinctAggregation).globalGrouping().addAggregation(variable("count_regionid"), getRowExpression("count(regionid)", defaultSessionHolder))),
                "SELECT DISTINCTCOUNT(regionId) FROM realtimeOnly");
    }

    @Test
    public void testDistinctCountGroupByPushdown()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode distinctAggregation = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city"), variable("regionid"))));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(distinctAggregation).singleGroupingSet(variable("city")).addAggregation(variable("count_regionid"), getRowExpression("count(regionid)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 10000", getExpectedAggOutput("DISTINCTCOUNT(regionId)", "city"), getGroupByLimitKey()));
    }

    // DISTINCTCOUNT combined with another aggregate via MarkDistinct; output order is not
    // deterministic, so the expected query is picked after inspecting the plan.
    @Test
    public void testDistinctCountWithOtherAggregationPushdown()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode markDistinct = buildPlan(planBuilder -> markDistinct(planBuilder, variable("regionid$distinct"), ImmutableList.of(variable("regionid")), justScan));
        PlanNode aggregate = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(markDistinct).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("count(regionid)"), getRowExpression("count(regionid)", defaultSessionHolder), Optional.empty(), Optional.empty(), false, Optional.of(variable("regionid$distinct"))).globalGrouping()));
        String expectedPinotQuery;
        if (aggregate.getOutputVariables().get(0).getName().equalsIgnoreCase("count(regionid)")) {
            expectedPinotQuery = "SELECT DISTINCTCOUNT(regionId), count(*) FROM realtimeOnly";
        }
        else {
            expectedPinotQuery = "SELECT count(*), DISTINCTCOUNT(regionId) FROM realtimeOnly";
        }
        testPinotQuery(new PinotConfig().setAllowMultipleAggregations(true), planBuilder -> planBuilder.limit(10, aggregate), expectedPinotQuery);
    }

    @Test
    public void testDistinctCountWithOtherAggregationGroupByPushdown()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode markDistinct = buildPlan(planBuilder -> markDistinct(planBuilder, variable("regionid$distinct"), ImmutableList.of(variable("regionid")), justScan));
        PlanNode aggregate = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(markDistinct).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("count(regionid)"), getRowExpression("count(regionid)", defaultSessionHolder), Optional.empty(), Optional.empty(), false, Optional.of(variable("regionid$distinct")))));
        String expectedPinotQuery;
        // Index 1 because index 0 is the group-by key (city).
        if (aggregate.getOutputVariables().get(1).getName().equalsIgnoreCase("count(regionid)")) {
            expectedPinotQuery = String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 10000", getExpectedAggOutput("DISTINCTCOUNT(regionId), count(*)", "city"), getGroupByLimitKey());
        }
        else {
            expectedPinotQuery = String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 10000", getExpectedAggOutput("count(*), DISTINCTCOUNT(regionId)", "city"), getGroupByLimitKey());
        }
        testPinotQuery(new PinotConfig().setAllowMultipleAggregations(true), aggregate, expectedPinotQuery);
    }

    // SELECT DISTINCT col — rendered as a count(*) GROUP BY in PQL.
    @Test
    public void testDistinctSelection()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("regionid"))),
                "SELECT count(*) FROM realtimeOnly GROUP BY regionId TOP 10000");
    }

    // approx_percentile(col, 0.10) -> PERCENTILEEST10.
    @Test
    public void testPercentileAggregation()
    {
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_percentile(fare, 0.10)", defaultSessionHolder)), "PERCENTILEEST10(fare)");
    }

    // approx_distinct -> DISTINCTCOUNTHLL; the second argument (max standard error)
    // maps to an HLL log2m parameter.
    @Test
    public void testApproxDistinct()
    {
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare)", defaultSessionHolder)), "DISTINCTCOUNTHLL(fare)");
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0.1)", defaultSessionHolder)), "DISTINCTCOUNTHLL(fare, 6)");
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0.02)", defaultSessionHolder)), "DISTINCTCOUNTHLL(fare, 11)");
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0.01)", defaultSessionHolder)), "DISTINCTCOUNTHLL(fare, 13)");
        testUnaryAggregationHelper((planBuilder, aggregationBuilder) -> aggregationBuilder.addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0.005)", defaultSessionHolder)), "DISTINCTCOUNTHLL(fare, 15)");
    }

    // Out-of-range error parameters (0, < 0.005, 1) must prevent pushdown entirely.
    @Test
    public void testApproxDistinctWithInvalidParameters()
    {
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        PlanNode approxPlanNode = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0)", defaultSessionHolder))));
        Optional<PinotQueryGenerator.PinotQueryGeneratorResult> generatedQuery = new PinotQueryGenerator(pinotConfig, functionAndTypeManager, functionAndTypeManager, standardFunctionResolution)
                .generate(approxPlanNode, defaultSessionHolder.getConnectorSession());
        assertFalse(generatedQuery.isPresent());
        approxPlanNode = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 0.004)", defaultSessionHolder))));
        generatedQuery = new PinotQueryGenerator(pinotConfig, functionAndTypeManager, functionAndTypeManager, standardFunctionResolution)
                .generate(approxPlanNode, defaultSessionHolder.getConnectorSession());
        assertFalse(generatedQuery.isPresent());
        approxPlanNode = buildPlan(planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("approx_distinct(fare, 1)", defaultSessionHolder))));
        generatedQuery = new PinotQueryGenerator(pinotConfig, functionAndTypeManager, functionAndTypeManager, standardFunctionResolution)
                .generate(approxPlanNode, defaultSessionHolder.getConnectorSession());
        assertFalse(generatedQuery.isPresent());
    }

    // date_trunc over from_unixtime in the GROUP BY key becomes Pinot dateTimeConvert.
    @Test
    public void testAggWithUDFInGroupBy()
    {
        LinkedHashMap<String, String> aggProjection = new LinkedHashMap<>();
        aggProjection.put("date", "date_trunc('day', cast(from_unixtime(secondssinceepoch - 50) AS TIMESTAMP))");
        PlanNode justDate = buildPlan(planBuilder -> project(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare), aggProjection, defaultSessionHolder));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justDate).singleGroupingSet(new VariableReferenceExpression("date", TIMESTAMP)).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY dateTimeConvert(SUB(secondsSinceEpoch, 50), '1:SECONDS:EPOCH', '1:MILLISECONDS:EPOCH', '1:DAYS') %s 10000", getExpectedAggOutput("count(*)", "dateTimeConvert(SUB(secondsSinceEpoch, 50), '1:SECONDS:EPOCH', '1:MILLISECONDS:EPOCH', '1:DAYS')"), getGroupByLimitKey()));
        aggProjection.put("city", "city");
        PlanNode newScanWithCity = buildPlan(planBuilder -> project(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare), aggProjection, defaultSessionHolder));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(newScanWithCity).singleGroupingSet(new VariableReferenceExpression("date", TIMESTAMP), variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY dateTimeConvert(SUB(secondsSinceEpoch, 50), '1:SECONDS:EPOCH', '1:MILLISECONDS:EPOCH', '1:DAYS'), city %s 10000", getExpectedAggOutput("count(*)", "dateTimeConvert(SUB(secondsSinceEpoch, 50), '1:SECONDS:EPOCH', '1:MILLISECONDS:EPOCH', '1:DAYS'), city"), getGroupByLimitKey()));
    }

    // array_max in the GROUP BY key becomes Pinot arrayMax.
    @Test
    public void testAggWithArrayFunctionsInGroupBy()
    {
        LinkedHashMap<String, String> aggProjection = new LinkedHashMap<>();
        aggProjection.put("array_max_0", "array_max(scores)");
        PlanNode justMaxScores = buildPlan(planBuilder -> project(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare, scores), aggProjection, defaultSessionHolder));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justMaxScores).singleGroupingSet(new VariableReferenceExpression("array_max_0", DOUBLE)).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY arrayMax(scores) %s 10000", getExpectedAggOutput("count(*)", "arrayMax(scores)"), getGroupByLimitKey()));
        aggProjection.put("city", "city");
        PlanNode newScanWithCity = buildPlan(planBuilder -> project(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare, scores), aggProjection, defaultSessionHolder));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(newScanWithCity).singleGroupingSet(new VariableReferenceExpression("array_max_0", DOUBLE), variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY arrayMax(scores), city %s 10000", getExpectedAggOutput("count(*)", "arrayMax(scores), city"), getGroupByLimitKey()));
    }

    // Checks sum(<presto array expression>) is rewritten to sum(<pinot array function>).
    private void testAggWithArrayFunction(String functionVariable, String prestoFunctionExpression, String pinotFunctionExpression)
    {
        LinkedHashMap<String, String> aggProjection = new LinkedHashMap<>();
        aggProjection.put("city", "city");
        aggProjection.put(functionVariable, prestoFunctionExpression);
        PlanNode aggregationPlanNode = buildPlan(planBuilder -> project(planBuilder, tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare, scores), aggProjection, defaultSessionHolder));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(aggregationPlanNode).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression(String.format("sum(%s)", functionVariable), defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 10000", getExpectedAggOutput(String.format("sum(%s)", pinotFunctionExpression), "city"), getGroupByLimitKey()));
    }

    // array_min/max plus the reduce() idioms for array sum/average.
    @Test
    public void testAggWithArrayFunctions()
    {
        testAggWithArrayFunction("array_min_0", "array_min(scores)", "arrayMin(scores)");
        testAggWithArrayFunction("array_max_0", "array_max(scores)", "arrayMax(scores)");
        testAggWithArrayFunction("array_sum_0", "reduce(scores, cast(0 as double), (s, x) -> s + x, s -> s)", "arraySum(scores)");
        testAggWithArrayFunction("array_average_0", "reduce(scores, CAST(ROW(0.0, 0) AS ROW(sum DOUBLE, count INTEGER)), (s,x) -> CAST(ROW(x + s.sum, s.count + 1) AS ROW(sum DOUBLE, count INTEGER)), s -> IF(s.count = 0, NULL, s.sum / s.count))", "arrayAverage(scores)");
    }

    // Multiple aggregates without GROUP BY are always allowed; output order is checked
    // through the __expressions__ placeholder.
    @Test
    public void testMultipleAggregatesWithOutGroupBy()
    {
        Map<String, String> outputVariables = ImmutableMap.of("agg", "count(*)", "min", "min(fare)");
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        testPinotQuery(
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).globalGrouping().addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("min"), getRowExpression("min(fare)", defaultSessionHolder))),
                "SELECT __expressions__ FROM realtimeOnly",
                defaultSessionHolder,
                outputVariables);
        testPinotQuery(
                planBuilder -> planBuilder.limit(50L, planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).globalGrouping().addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("min"), getRowExpression("min(fare)", defaultSessionHolder)))),
                "SELECT __expressions__ FROM realtimeOnly",
                defaultSessionHolder,
                outputVariables);
    }

    @Test
    public void testMultipleAggregatesWhenAllowed()
    {
        helperTestMultipleAggregatesWithGroupBy(new PinotConfig().setAllowMultipleAggregations(true));
    }

    // With multiple aggregations disallowed, generation yields no result and .get() throws.
    @Test(expectedExceptions = NoSuchElementException.class)
    public void testMultipleAggregatesNotAllowed()
    {
        helperTestMultipleAggregatesWithGroupBy(new PinotConfig().setAllowMultipleAggregations(false));
    }

    protected void helperTestMultipleAggregatesWithGroupBy(PinotConfig givenPinotConfig)
    {
        Map<String, String> outputVariables = ImmutableMap.of("agg", "count(*)", "min", "min(fare)");
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        testPinotQuery(
                givenPinotConfig,
                planBuilder -> planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("min"), getRowExpression("min(fare)", defaultSessionHolder))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 10000", getExpectedAggOutput("__expressions__", "city"), getGroupByLimitKey()),
                defaultSessionHolder,
                outputVariables);
    }

    // A limit over a multi-aggregate GROUP BY is not pushdown-able even when multiple
    // aggregations are otherwise permitted.
    @Test(expectedExceptions = NoSuchElementException.class)
    public void testMultipleAggregateGroupByWithLimitFails()
    {
        Map<String, String> outputVariables = ImmutableMap.of("agg", "count(*)", "min", "min(fare)");
        PlanNode justScan = buildPlan(planBuilder -> tableScan(planBuilder, pinotTable, regionId, secondsSinceEpoch, city, fare));
        testPinotQuery(
                planBuilder -> planBuilder.limit(50L, planBuilder.aggregation(aggBuilder -> aggBuilder.source(justScan).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("count(*)", defaultSessionHolder)).addAggregation(planBuilder.variable("min"), getRowExpression("min(fare)", defaultSessionHolder)))),
                String.format("SELECT %s FROM realtimeOnly GROUP BY city %s 50", getExpectedAggOutput("__expressions__", "city"), getGroupByLimitKey()),
                defaultSessionHolder,
                outputVariables);
    }

    // A scalar-function projection outside an aggregation must not be pushed down.
    @Test(expectedExceptions = NoSuchElementException.class)
    public void testForbiddenProjectionOutsideOfAggregation()
    {
        LinkedHashMap<String, String> projections = new LinkedHashMap<>(ImmutableMap.of("hour", "date_trunc('hour', from_unixtime(secondssinceepoch))", "regionid", "regionid"));
        PlanNode plan = buildPlan(planBuilder -> limit(planBuilder, 10, project(planBuilder, tableScan(planBuilder, pinotTable, secondsSinceEpoch, regionId), projections, defaultSessionHolder)));
        testPinotQuery(pinotConfig, plan, "Should fail", defaultSessionHolder, ImmutableMap.of());
    }

    // TopN pushdown (requires the pushdownTopNBrokerQueries config flag).
    @Test
    public void testSimpleSelectWithTopN()
    {
        pinotConfig.setPushdownTopNBrokerQueries(true);
        SessionHolder sessionHolder = new SessionHolder(pinotConfig);
        PlanBuilder planBuilder = createPlanBuilder(new SessionHolder(pinotConfig));
        TableScanNode tableScanNode = tableScan(planBuilder, pinotTable, regionId, city, fare);
        TopNNode topNFare = topN(planBuilder, 50L, ImmutableList.of("fare"), ImmutableList.of(false), tableScanNode);
        testPinotQuery(
                pinotConfig,
                topNFare,
                "SELECT regionId, city, fare FROM realtimeOnly ORDER BY fare DESC LIMIT 50",
                sessionHolder,
                ImmutableMap.of());
        TopNNode topnFareAndCity = topN(planBuilder, 50L, ImmutableList.of("fare", "city"), ImmutableList.of(true, false), tableScanNode);
        testPinotQuery(
                pinotConfig,
                topnFareAndCity,
                "SELECT regionId, city, fare FROM realtimeOnly ORDER BY fare, city DESC LIMIT 50",
                sessionHolder,
                ImmutableMap.of());
        ProjectNode projectNode = project(planBuilder, topnFareAndCity, ImmutableList.of("regionid", "city"));
        testPinotQuery(pinotConfig, projectNode, "SELECT regionId, city FROM realtimeOnly ORDER BY fare, city DESC LIMIT 50", sessionHolder, ImmutableMap.of());
    }

    // ORDER BY on top of an aggregation cannot be pushed down; generation must fail.
    @Test(expectedExceptions = NoSuchElementException.class)
    public void testAggregationWithOrderByPushDownInTopN()
    {
        PlanBuilder planBuilder = createPlanBuilder(defaultSessionHolder);
        TableScanNode tableScanNode = tableScan(planBuilder, pinotTable, city, fare);
        AggregationNode agg = planBuilder.aggregation(aggBuilder -> aggBuilder.source(tableScanNode).singleGroupingSet(variable("city")).addAggregation(planBuilder.variable("agg"), getRowExpression("sum(fare)", defaultSessionHolder)));
        TopNNode topN = new TopNNode(planBuilder.getIdAllocator().getNextId(), agg, 50L, new OrderingScheme(ImmutableList.of(new Ordering(variable("city"), SortOrder.DESC_NULLS_FIRST))), TopNNode.Step.FINAL);
        testPinotQuery(pinotConfig, topN, "", defaultSessionHolder, ImmutableMap.of());
    }

    // DISTINCT ... LIMIT n becomes a GROUP BY with the limit as the group count.
    @Test
    public void testDistinctLimitPushdown()
    {
        PlanBuilder planBuilder = createPlanBuilder(defaultSessionHolder);
        DistinctLimitNode distinctLimitNode = distinctLimit(
                planBuilder,
                ImmutableList.of(new VariableReferenceExpression("regionid", BIGINT)),
                50L,
                tableScan(planBuilder, pinotTable, regionId));
        testPinotQuery(
                pinotConfig,
                distinctLimitNode,
                String.format("SELECT %s FROM realtimeOnly GROUP BY regionId %s 50", getExpectedDistinctOutput("regionId"), getGroupByLimitKey()),
                defaultSessionHolder,
                ImmutableMap.of());
        planBuilder = createPlanBuilder(defaultSessionHolder);
        distinctLimitNode = distinctLimit(
                planBuilder,
                ImmutableList.of(
                        new VariableReferenceExpression("regionid", BIGINT),
                        new VariableReferenceExpression("city", VARCHAR)),
                50L,
                tableScan(planBuilder, pinotTable, regionId, city));
        testPinotQuery(
                pinotConfig,
                distinctLimitNode,
                String.format("SELECT %s FROM realtimeOnly GROUP BY regionId, city %s 50", getExpectedDistinctOutput("regionId, city"), getGroupByLimitKey()),
                defaultSessionHolder,
                ImmutableMap.of());
    }

    // Hook: aggregate rendered for a distinct-limit query ("count(*)" in PQL).
    protected String getExpectedDistinctOutput(String groupKeys)
    {
        return "count(*)";
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.valves;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import jakarta.servlet.ServletException;

import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.catalina.util.NetMask;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * Request filter valve that accepts or rejects requests based on the remote
 * (or proxied peer) IP address matched against CIDR netmask lists.
 * Deny rules are evaluated before allow rules; see {@link #isAllowed(String)}.
 */
public final class RemoteCIDRValve extends RequestFilterValve {

    /**
     * Our logger
     */
    private static final Log log = LogFactory.getLog(RemoteCIDRValve.class);

    /**
     * The list of allowed {@link NetMask}s
     */
    private final List<NetMask> allow = new ArrayList<>();

    /**
     * The list of denied {@link NetMask}s
     */
    private final List<NetMask> deny = new ArrayList<>();


    public RemoteCIDRValve() {
    }


    /**
     * Return a string representation of the {@link NetMask} list in #allow.
     *
     * @return the #allow list as a string, without the leading '[' and trailing
     *         ']'
     */
    @Override
    public String getAllow() {
        return allow.toString().replace("[", "").replace("]", "");
    }


    /**
     * Fill the #allow list with the list of netmasks provided as an argument,
     * if any. Calls #fillFromInput.
     *
     * @param input The list of netmasks, as a comma separated string
     * @throws IllegalArgumentException One or more netmasks are invalid
     */
    @Override
    public void setAllow(final String input) {
        final List<String> messages = fillFromInput(input, allow);

        if (messages.isEmpty()) {
            return;
        }

        // Mark the attribute invalid before throwing so the container can
        // report the misconfiguration; every bad entry is logged individually.
        allowValid = false;
        for (final String message : messages) {
            log.error(message);
        }

        throw new IllegalArgumentException(sm.getString("remoteCidrValve.invalid", "allow"));
    }


    /**
     * Return a string representation of the {@link NetMask} list in #deny.
     *
     * @return the #deny list as a string, without the leading '[' and trailing
     *         ']'
     */
    @Override
    public String getDeny() {
        return deny.toString().replace("[", "").replace("]", "");
    }


    /**
     * Fill the #deny list with the list of netmasks provided as an argument, if
     * any. Calls #fillFromInput.
     *
     * @param input The list of netmasks, as a comma separated string
     * @throws IllegalArgumentException One or more netmasks are invalid
     */
    @Override
    public void setDeny(final String input) {
        final List<String> messages = fillFromInput(input, deny);

        if (messages.isEmpty()) {
            return;
        }

        // Same failure protocol as setAllow(): flag invalid, log each entry, throw.
        denyValid = false;
        for (final String message : messages) {
            log.error(message);
        }

        throw new IllegalArgumentException(sm.getString("remoteCidrValve.invalid", "deny"));
    }


    /**
     * Build the address string to filter on (optionally suffixed with the
     * connector port as {@code addr;port}) and delegate to
     * {@code RequestFilterValve.process()}.
     */
    @Override
    public void invoke(final Request request, final Response response) throws IOException, ServletException {
        String property;
        if (getUsePeerAddress()) {
            // Address of the immediate peer (e.g. a proxy) rather than the
            // client address reported by the request.
            property = request.getPeerAddr();
        } else {
            property = request.getRequest().getRemoteAddr();
        }
        if (getAddConnectorPort()) {
            property = property + ";" + request.getConnector().getPortWithOffset();
        }
        process(property, request, response);
    }


    /**
     * Decide whether the given {@code addr} or {@code addr;port} string is
     * allowed. Deny masks are checked first and win; then allow masks; a
     * non-empty deny list with an empty allow list implicitly allows the rest.
     * Any parse failure is logged and treated as "not allowed".
     */
    @Override
    public boolean isAllowed(final String property) {
        // Split off the optional ";port" suffix appended by invoke().
        final int portIdx = property.indexOf(';');
        final int port;
        final String nonPortPart;
        if (portIdx == -1) {
            if (getAddConnectorPort()) {
                // Port filtering requested but no port present: fail closed.
                log.error(sm.getString("remoteCidrValve.noPort"));
                return false;
            }
            port = -1;
            nonPortPart = property;
        } else {
            if (!getAddConnectorPort()) {
                // Port present but port filtering not configured: fail closed.
                log.error(sm.getString("remoteCidrValve.unexpectedPort"));
                return false;
            }
            nonPortPart = property.substring(0, portIdx);
            try {
                port = Integer.parseInt(property.substring(portIdx + 1));
            } catch (NumberFormatException e) {
                // This should be in the 'could never happen' category but handle it
                // to be safe.
                log.error(sm.getString("remoteCidrValve.noPort"), e);
                return false;
            }
        }

        final InetAddress addr;
        try {
            addr = InetAddress.getByName(nonPortPart);
        } catch (UnknownHostException e) {
            // This should be in the 'could never happen' category but handle it
            // to be safe.
            log.error(sm.getString("remoteCidrValve.noRemoteIp"), e);
            return false;
        }

        // Deny rules take precedence: first match rejects the request.
        for (final NetMask nm : deny) {
            if (getAddConnectorPort()) {
                if (nm.matches(addr, port)) {
                    return false;
                }
            } else {
                if (nm.matches(addr)) {
                    return false;
                }
            }
        }

        for (final NetMask nm : allow) {
            if (getAddConnectorPort()) {
                if (nm.matches(addr, port)) {
                    return true;
                }
            } else {
                if (nm.matches(addr)) {
                    return true;
                }
            }
        }

        // Allow if deny is specified but allow isn't
        if (!deny.isEmpty() && allow.isEmpty()) {
            return true;
        }

        // Deny this request
        return false;
    }


    @Override
    protected Log getLog() {
        return log;
    }


    /**
     * Fill a {@link NetMask} list from a string input containing a
     * comma-separated list of (hopefully valid) {@link NetMask}s.
     *
     * @param input  The input string
     * @param target The list to fill
     * @return a string list of processing errors (empty when no errors)
     */
    private List<String> fillFromInput(final String input, final List<NetMask> target) {
        // The target list is always rebuilt from scratch.
        target.clear();
        if (input == null || input.isEmpty()) {
            return Collections.emptyList();
        }

        final List<String> messages = new LinkedList<>();
        NetMask nm;

        // Entries are comma-separated; surrounding whitespace is tolerated.
        for (final String s : input.split("\\s*,\\s*")) {
            try {
                nm = new NetMask(s);
                target.add(nm);
            } catch (IllegalArgumentException e) {
                // Collect all bad entries so the caller can report every one.
                messages.add(s + ": " + e.getMessage());
            }
        }

        return Collections.unmodifiableList(messages);
    }
}
package br.com.gtmf.wsdl;

import java.util.List;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.Action;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;


/**
 * This class was generated by the JAX-WS RI.
 * JAX-WS RI 2.2.4-b01
 * Generated source version: 2.2
 *
 * NOTE(review): generated code — do not hand-edit; re-run wsimport against the
 * WSDL to regenerate. Every operation returns its result as a String payload.
 */
@WebService(name = "SimInterfaceImpl", targetNamespace = "http://gtmf.com.br/")
@XmlSeeAlso({
    ObjectFactory.class
})
public interface SimInterfaceImpl {


    /**
     * Authenticate a session.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "login", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.Login")
    @ResponseWrapper(localName = "loginResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.LoginResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/loginRequest", output = "http://gtmf.com.br/SimInterfaceImpl/loginResponse")
    public String login(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Terminate a session.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "logout", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.Logout")
    @ResponseWrapper(localName = "logoutResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.LogoutResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/logoutRequest", output = "http://gtmf.com.br/SimInterfaceImpl/logoutResponse")
    public String logout(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Start a radar with the two given arguments.
     *
     * @param arg1
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "startRadar", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.StartRadar")
    @ResponseWrapper(localName = "startRadarResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.StartRadarResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/startRadarRequest", output = "http://gtmf.com.br/SimInterfaceImpl/startRadarResponse")
    public String startRadar(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0,
        @WebParam(name = "arg1", targetNamespace = "")
        String arg1);

    /**
     * Render a list of strings as a single string.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "toStringList", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ToStringList")
    @ResponseWrapper(localName = "toStringListResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ToStringListResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/toStringListRequest", output = "http://gtmf.com.br/SimInterfaceImpl/toStringListResponse")
    public String toStringList(
        @WebParam(name = "arg0", targetNamespace = "")
        List<String> arg0);

    /**
     * Stop a previously started radar.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "stopRadar", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.StopRadar")
    @ResponseWrapper(localName = "stopRadarResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.StopRadarResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/stopRadarRequest", output = "http://gtmf.com.br/SimInterfaceImpl/stopRadarResponse")
    public String stopRadar(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Send a message to a specific target (three-argument variant).
     *
     * @param arg2
     * @param arg1
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "writeTo", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.WriteTo")
    @ResponseWrapper(localName = "writeToResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.WriteToResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/writeToRequest", output = "http://gtmf.com.br/SimInterfaceImpl/writeToResponse")
    public String writeTo(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0,
        @WebParam(name = "arg1", targetNamespace = "")
        String arg1,
        @WebParam(name = "arg2", targetNamespace = "")
        String arg2);

    /**
     * Update the location associated with a client.
     *
     * @param arg1
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "changeLocation", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ChangeLocation")
    @ResponseWrapper(localName = "changeLocationResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ChangeLocationResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/changeLocationRequest", output = "http://gtmf.com.br/SimInterfaceImpl/changeLocationResponse")
    public String changeLocation(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0,
        @WebParam(name = "arg1", targetNamespace = "")
        String arg1);

    /**
     * List clients near the given reference.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "listNearlyClients", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ListNearlyClients")
    @ResponseWrapper(localName = "listNearlyClientsResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ListNearlyClientsResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/listNearlyClientsRequest", output = "http://gtmf.com.br/SimInterfaceImpl/listNearlyClientsResponse")
    public String listNearlyClients(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Create a new room.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "addRoom", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.AddRoom")
    @ResponseWrapper(localName = "addRoomResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.AddRoomResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/addRoomRequest", output = "http://gtmf.com.br/SimInterfaceImpl/addRoomResponse")
    public String addRoom(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Fetch a room by identifier.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "getRoom", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.GetRoom")
    @ResponseWrapper(localName = "getRoomResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.GetRoomResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/getRoomRequest", output = "http://gtmf.com.br/SimInterfaceImpl/getRoomResponse")
    public String getRoom(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * List all rooms.
     *
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "listRooms", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ListRooms")
    @ResponseWrapper(localName = "listRoomsResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ListRoomsResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/listRoomsRequest", output = "http://gtmf.com.br/SimInterfaceImpl/listRoomsResponse")
    public String listRooms();

    /**
     * Move a client to a different room.
     *
     * @param arg1
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "changeClientRoom", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ChangeClientRoom")
    @ResponseWrapper(localName = "changeClientRoomResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ChangeClientRoomResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/changeClientRoomRequest", output = "http://gtmf.com.br/SimInterfaceImpl/changeClientRoomResponse")
    public String changeClientRoom(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0,
        @WebParam(name = "arg1", targetNamespace = "")
        String arg1);

    /**
     * Render a list of clients as a single string.
     * NOTE(review): this is a web-service operation named "toString", not an
     * override of Object.toString() — it takes a parameter.
     *
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "toString", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ToString")
    @ResponseWrapper(localName = "toStringResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.ToStringResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/toStringRequest", output = "http://gtmf.com.br/SimInterfaceImpl/toStringResponse")
    public String toString(
        @WebParam(name = "arg0", targetNamespace = "")
        List<Client> arg0);

    /**
     * Send a message (two-argument variant).
     *
     * @param arg1
     * @param arg0
     * @return
     *     returns java.lang.String
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "write", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.Write")
    @ResponseWrapper(localName = "writeResponse", targetNamespace = "http://gtmf.com.br/", className = "br.com.gtmf.wsdl.WriteResponse")
    @Action(input = "http://gtmf.com.br/SimInterfaceImpl/writeRequest", output = "http://gtmf.com.br/SimInterfaceImpl/writeResponse")
    public String write(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0,
        @WebParam(name = "arg1", targetNamespace = "")
        String arg1);

}
package betterwithaddons.tileentity;

import betterwithaddons.block.EriottoMod.BlockNabe;
import betterwithaddons.crafting.manager.CraftingManagerNabe;
import betterwithaddons.crafting.recipes.INabeRecipe;
import betterwithaddons.util.InventoryUtil;
import betterwithaddons.util.NabeResult;
import betterwithaddons.util.StackResult;
import betterwithmods.common.registry.heat.BWMHeatRegistry;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.SoundEvents;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.*;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidUtil;
import net.minecraftforge.fluids.capability.IFluidHandlerItem;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.ItemStackHandler;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Tile entity for the Nabe (cooking pot) block: collects dropped ingredients,
 * fills with water from a fluid container or a water block above, and — while
 * heated from below — boils the ingredients into a recipe result.
 */
public class TileEntityNabe extends TileEntityBase implements ITickable {
    // 6-slot ingredient inventory; each slot is capped at a single item (see createItemStackHandler).
    public ItemStackHandler inventory = createItemStackHandler();
    // Cached heat value from the block below; refreshed in update() via validateFireIntensity().
    public int fireIntensity;
    // Current pot contents (fluid and/or crafted result); starts as an empty water fill.
    private NabeResult result = new NabeResult(FluidRegistry.getFluidStack("water",0));
    // Accumulated boiling progress, incremented by fireIntensity each tick while a recipe matches.
    private int boilingTime = 0;
    public Random random = new Random();

    /**
     * Build the ingredient inventory: 6 slots, 1 item per slot.
     */
    public ItemStackHandler createItemStackHandler() {
        return new SimpleItemStackHandler(this, true, 6) {
            @Override
            public int getSlotLimit(int slot) {
                return 1;
            }
        };
    }

    /**
     * Persist inventory, fill state and boiling progress to NBT.
     */
    @Override
    public void writeDataToNBT(NBTTagCompound compound) {
        compound.setTag("inventory",inventory.serializeNBT());
        compound.setTag("fill",result.serializeNBT());
        compound.setInteger("boilTime",boilingTime);
    }

    /**
     * Restore inventory, fill state and boiling progress from NBT.
     */
    @Override
    public void readDataFromNBT(NBTTagCompound compound) {
        // Recreate the handler so stale state from a previous load is discarded.
        inventory = createItemStackHandler();
        inventory.deserializeNBT(compound.getCompoundTag("inventory"));
        result = NabeResult.deserializeNBT(compound.getCompoundTag("fill"));
        boilingTime = compound.getInteger("boilTime");
    }

    /**
     * Heat supplied by the block directly below the pot.
     */
    public int getFireIntensity() {
        return BWMHeatRegistry.getHeat(world,pos.down());
    }

    /**
     * Per-tick logic: client side only spawns smoke while boiling; server side
     * captures dropped items, tracks heat, refills from a water block above,
     * and advances/craft-completes the matching recipe.
     */
    @Override
    public void update() {
        if (world == null || pos == null)
            return;
        IBlockState state = world.getBlockState(pos);
        IBlockState upstate = world.getBlockState(pos.up());
        if (!(state.getBlock() instanceof BlockNabe))
            return;

        // Client side: cosmetic smoke while boiling, then bail out.
        // NOTE(review): when isRemote && boilingTime == 0 this falls through to the
        // server-side logic below — presumably harmless, but confirm it is intended.
        if (world.isRemote && boilingTime > 0) {
            world.spawnParticle(EnumParticleTypes.SMOKE_LARGE,pos.getX()+random.nextDouble(),pos.getY()+1.0,pos.getZ()+random.nextDouble(),0,0.001,0); //TODO: Smonk
            return;
        }

        entityCollision();

        if (this.fireIntensity != getFireIntensity()) {
            validateFireIntensity();
        }

        // A water block sitting on top refills the pot whenever it isn't full.
        if (!result.isFull() && isWater(upstate)) {
            resetWater();
        }

        if (hasWater() && this.fireIntensity > 0 && !isWater(upstate)) {
            List<ItemStack> compressedStacks = getCompressedStacks();
            INabeRecipe recipe = CraftingManagerNabe.getInstance().getMostValidRecipe(this, compressedStacks);

            if(recipe != null) {
                // Hotter fire boils faster: progress by current heat each tick.
                boilingTime += this.fireIntensity;
                if(boilingTime >= recipe.getBoilingTime(this)) {
                    result = recipe.craft(this, compressedStacks);
                    ejectMisfits();
                    boilingTime = 0;
                }
                markDirty();
                syncTE();
            }
            else
                boilingTime = 0;
        }
        else
            // No heat/water: boiling progress decays rather than resetting instantly.
            boilingTime = Math.max(boilingTime-1,0);
    }

    /**
     * True when the pot is full of plain water (as opposed to a crafted result).
     */
    private boolean hasWater() {
        FluidStack fluidStack = result.getFluid();
        return result.isFull() && fluidStack != null && fluidStack.getFluid() == FluidRegistry.WATER;
    }

    /**
     * Merge the per-slot single items into stacks of equal items so recipes can
     * match on ingredient counts.
     */
    private List<ItemStack> getCompressedStacks() {
        ArrayList<ItemStack> stacks = new ArrayList<>();
        for(int i = 0; i < inventory.getSlots(); i++) {
            ItemStack stack = inventory.getStackInSlot(i).copy();
            if(stack.isEmpty())
                continue;
            // Top up any existing equal stack before adding a new entry.
            stacks.stream().filter(existing -> existing.isItemEqual(stack)).forEach(existing -> {
                int count = Math.min(stack.getCount(), existing.getMaxStackSize() - existing.getCount());
                existing.grow(count);
                stack.shrink(count);
            });
            if(!stack.isEmpty())
                stacks.add(stack);
        }
        return stacks;
    }

    /**
     * Refill the pot to a full water charge and notify clients.
     */
    private void resetWater() {
        result = new NabeResult(FluidRegistry.getFluidStack("water",NabeResult.MAX_FLUID_FILL));
        markDirty();
        syncTE();
    }

    private boolean isWater(IBlockState upstate) {
        return upstate.getMaterial() == Material.WATER;
    }

    /**
     * Refresh the cached heat value from the block below.
     */
    private void validateFireIntensity() {
        fireIntensity = BWMHeatRegistry.getHeat(world,pos.down());
    }

    /**
     * Keep this tile entity across state changes of the same block; only
     * refresh when the block itself is replaced.
     */
    @Override
    public boolean shouldRefresh(World world, BlockPos pos, IBlockState oldState, IBlockState newSate) {
        return oldState.getBlock() != newSate.getBlock();
    }

    /**
     * Number of occupied ingredient slots.
     */
    public int countIngredients() {
        int count = 0;
        for (int i = 0; i < inventory.getSlots(); i++)
            if (!inventory.getStackInSlot(i).isEmpty())
                count++;
        return count;
    }

    /**
     * Right-click interaction: try, in order, to take the result with the held
     * item, to fill the pot with water from it, or to insert it as an ingredient.
     */
    public void onBlockActivated(EntityPlayer playerIn, EnumHand hand, EnumFacing facing, float hitX, float hitY, float hitZ) {
        ItemStack originalItem = playerIn.getHeldItem(hand);

        // First try to extract the current result into the held container.
        StackResult retrieved = result.take(originalItem.copy());
        if (!retrieved.isSuccess()) {
            if (!result.isFull()) { //Try to reset water if not full
                retrieved = putWater(originalItem);
            } else if (hasWater() && CraftingManagerNabe.getInstance().isValidItem(originalItem)) { //Otherwise try to insert it
                retrieved = putItem(originalItem);
            }
        }

        if (retrieved.isSuccess()) {
            if (!retrieved.getReturnStack().isEmpty()) {
                InventoryUtil.addItemToPlayer(playerIn, retrieved.getReturnStack());
            }
            // Put the (possibly modified) original stack back into the hand that was used.
            EntityEquipmentSlot slot = hand == EnumHand.MAIN_HAND ? EntityEquipmentSlot.MAINHAND : EntityEquipmentSlot.OFFHAND;
            playerIn.setItemStackToSlot(slot, retrieved.getOriginalStack());
        }

        markDirty();
        syncTE();
    }

    /**
     * After crafting, throw out any leftover items that are not valid ingredients.
     */
    private void ejectMisfits() {
        for(int i = 0; i < inventory.getSlots(); i++) {
            ItemStack stack = inventory.getStackInSlot(i);
            if(!CraftingManagerNabe.getInstance().isValidItem(stack)) {
                inventory.setStackInSlot(i,ItemStack.EMPTY);
                ejectStack(stack);
            }
        }
    }

    /**
     * Spawn the given stack as an item entity just above the pot with a small
     * random sideways kick.
     */
    private void ejectStack(ItemStack stack) {
        float xEject = random.nextFloat() * 0.5f - 0.25f;
        float zEject = random.nextFloat() * 0.5f - 0.25f;
        float xOff = 0.5F;
        float yOff = 1.2f;
        float zOff = 0.5F;
        EntityItem item = new EntityItem(this.getWorld(), pos.getX() + xOff, pos.getY() + yOff, pos.getZ() + zOff, stack);
        item.motionX = xEject * 0.1f;
        item.motionY = 0.1f;
        item.motionZ = zEject * 0.1f;
        item.setDefaultPickupDelay();
        this.getWorld().spawnEntity(item);
    }

    /**
     * Pull dropped item entities above the pot into the inventory (valid
     * ingredients) or use them to fill water (fluid containers); plays a
     * pickup/bucket sound when anything was absorbed.
     *
     * @return true when at least one item was captured
     */
    private boolean captureDroppedItems() {
        List<EntityItem> items = this.getCaptureItems(getWorld(), getPos());
        SoundEvent sound = null;
        if (items.size() > 0) {
            for (EntityItem item : items) {
                if(CraftingManagerNabe.getInstance().isValidItem(item.getItem()) && putDropInInventoryAllSlots(inventory, item))
                    sound = SoundEvents.ENTITY_ITEM_PICKUP;
                if(!result.isFull() && putDropWaterFill(item))
                    sound = SoundEvents.ITEM_BUCKET_FILL;
            }
            if (sound != null) {
                this.getWorld().playSound(null, pos.getX(), pos.getY(), pos.getZ(), sound, SoundCategory.PLAYERS, 0.2F,
                        ((getWorld().rand.nextFloat() - getWorld().rand.nextFloat()) * 0.7F + 1.0F) * 2.0F);
                return true;
            }
        }
        return false;
    }

    /**
     * Try to drain a dropped fluid container into the pot; the drained
     * container is ejected back into the world.
     */
    private boolean putDropWaterFill(EntityItem item) {
        ItemStack stack = item.getItem();
        StackResult result = putWater(stack.copy());
        item.setItem(result.getOriginalStack());
        ejectStack(result.getReturnStack());
        return result.isSuccess();
    }

    /**
     * Insert one item from the given stack into the ingredient inventory.
     *
     * @return a successful StackResult holding the shrunken original stack, or
     *         a failed one wrapping the untouched input
     */
    public StackResult putItem(ItemStack stack) {
        ItemStack originalItem = stack.copy();
        ItemStack oneItem = originalItem.splitStack(1);
        ItemStack returnStack = attemptToInsert(inventory, oneItem);
        if(returnStack.isEmpty()) {
            return new StackResult(true, originalItem, returnStack);
        }
        return new StackResult(false,stack);
    }

    /**
     * Drain one bucket of water from a single container out of the stack and
     * refill the pot with it; returns the emptied container as the return stack.
     */
    private StackResult putWater(ItemStack stack) {
        ItemStack original = stack.copy();
        ItemStack container = original.splitStack(1);
        IFluidHandlerItem handler = FluidUtil.getFluidHandler(container);
        if(handler != null) {
            FluidStack drainResource = new FluidStack(FluidRegistry.WATER, Fluid.BUCKET_VOLUME);
            // Simulated drain first (false); only commit (true) when a full bucket of water is available.
            FluidStack fluid = handler.drain(drainResource,false);
            if(fluid != null && fluid.getFluid() == FluidRegistry.WATER && fluid.amount >= Fluid.BUCKET_VOLUME) {
                handler.drain(drainResource,true);
                resetWater();
                return new StackResult(true,original,handler.getContainer());
            }
        }
        return new StackResult(false,stack);
    }

    /**
     * Capture any nearby dropped items when the inventory has free space.
     */
    private void entityCollision() {
        boolean flag = false;
        if (!isFull()) {
            flag = captureDroppedItems();
        }
        if (flag) {
            this.markDirty();
        }
    }

    /**
     * True when every inventory slot holds a maxed-out stack.
     */
    private boolean isFull() {
        for (int i = 0; i < this.inventory.getSlots(); i++) {
            ItemStack itemstack = this.inventory.getStackInSlot(i);
            if (itemstack.isEmpty() || itemstack.getCount() != itemstack.getMaxStackSize()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Move an item entity's stack into the inventory; the entity is killed when
     * fully absorbed, otherwise it keeps the leftovers.
     *
     * @return true when the whole stack was absorbed
     */
    public static boolean putDropInInventoryAllSlots(IItemHandler inv, EntityItem entityItem) {
        boolean putAll = false;
        if (entityItem == null) {
            return false;
        }
        else {
            ItemStack itemstack = entityItem.getItem().copy();
            ItemStack leftovers = attemptToInsert(inv, itemstack);
            if (!leftovers.isEmpty() && leftovers.getCount() != 0) {
                entityItem.setItem(leftovers);
            }
            else {
                putAll = true;
                entityItem.setDead();
            }
            return putAll;
        }
    }

    /**
     * Insert a stack into the first slots that accept it, returning whatever
     * could not be placed (EMPTY when everything fit).
     */
    public static ItemStack attemptToInsert(IItemHandler inv, ItemStack stack) {
        ItemStack leftover = stack;
        for (int slot = 0; slot < inv.getSlots(); slot++) {
            leftover = inv.insertItem(slot, leftover, false);
            if (leftover.isEmpty())
                break;
        }
        return leftover;
    }

    /**
     * All live item entities within the 1x1.5x1 capture box above this block.
     */
    public List<EntityItem> getCaptureItems(World worldIn, BlockPos pos) {
        return worldIn.getEntitiesWithinAABB(EntityItem.class,
                new AxisAlignedBB(pos.getX(), pos.getY(), pos.getZ(), pos.getX() + 1D, pos.getY() + 1.5D, pos.getZ() + 1D),
                EntitySelectors.IS_ALIVE);
    }

    /**
     * Consume one item from the stack, honoring container items (e.g. a bucket
     * is returned instead of shrinking the stack).
     */
    public ItemStack consumeItem(ItemStack stack) {
        if(stack.getItem().hasContainerItem(stack))
            return stack.getItem().getContainerItem(stack);
        stack.shrink(1);
        return stack;
    }

    /**
     * Current pot contents.
     */
    public NabeResult getFill() {
        return result;
    }
}
package org.tuckey.web.filters.urlrewrite.utils; import org.tuckey.web.filters.urlrewrite.Condition; import org.tuckey.web.filters.urlrewrite.Conf; import org.tuckey.web.filters.urlrewrite.NormalRule; import org.tuckey.web.filters.urlrewrite.SetAttribute; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Loader to enable loading of mod_rewrite style configuration for UrlRewriteFilter. */ public class ModRewriteConfLoader { private static Log log = Log.getLog(ModRewriteConfLoader.class); private final Pattern LOG_LEVEL_PATTERN = Pattern.compile("RewriteLogLevel\\s+([0-9]+)\\s*$"); private final Pattern LOG_TYPE_PATTERN = Pattern.compile("RewriteLog\\s+(.*)$"); private final Pattern ENGINE_PATTERN = Pattern.compile("RewriteEngine\\s+([a-zA-Z0-9]+)\\s*$"); private final Pattern CONDITION_PATTERN = Pattern.compile("RewriteCond\\s+(.*)$"); private final Pattern RULE_PATTERN = Pattern.compile("RewriteRule\\s+(.*)$"); public void process(InputStream is, Conf conf) throws IOException { String line; BufferedReader in = new BufferedReader(new InputStreamReader(is)); StringBuffer buffer = new StringBuffer(); while ((line = in.readLine()) != null) { buffer.append(line); buffer.append("\n"); } process(buffer.toString(), conf); } public void process(String modRewriteStyleConf, Conf conf) { String[] lines = modRewriteStyleConf.split("\n"); List conditionsBuffer = new ArrayList(); StringBuffer notesBuffer = new StringBuffer(); String logLevelStr = null; String logTypeStr = null; for (int i = 0; i < lines.length; i++) { String line = StringUtils.trimToNull(lines[i]); if (line == null) continue; log.debug("processing line: " + line); if (line.startsWith("#")) { log.debug("adding note line (line starting with #)"); if (notesBuffer.length() > 0) notesBuffer.append("\n"); String noteLine = 
StringUtils.trim(line.substring(1)); notesBuffer.append(noteLine); } else if (line.startsWith("RewriteBase")) { log.info("RewriteBase not supported, ignored"); } else if (line.startsWith("RewriteCond")) { Condition condition = processRewriteCond(line); if (condition != null) conditionsBuffer.add(condition); } else if (line.startsWith("RewriteEngine")) { processRewriteEngine(conf, line); } else if (line.startsWith("RewriteLock")) { log.error("RewriteLock not supported, ignored"); } else if (line.startsWith("RewriteLogLevel")) { logLevelStr = parseLogLevel(logLevelStr, line); } else if (line.startsWith("RewriteLog")) { logTypeStr = parseLogType(logTypeStr, line); } else if (line.startsWith("RewriteMap")) { log.error("RewriteMap not supported, ignored"); } else if (line.startsWith("RewriteOptions")) { log.error("RewriteOptions not supported, ignored"); } else if (line.startsWith("RewriteRule")) { parseRule(conf, conditionsBuffer, notesBuffer, line); notesBuffer = new StringBuffer(); conditionsBuffer = new ArrayList(); } } if (logTypeStr != null || logLevelStr != null) { String logStr = (logTypeStr == null ? "" : logTypeStr) + (logLevelStr == null ? 
"" : ":" + logLevelStr); log.debug("setting log to: " + logStr); Log.setLevel(logStr); } if (conditionsBuffer.size() > 0) { log.error("conditions left over without a rule"); } } private void parseRule(Conf conf, List conditionsBuffer, StringBuffer notesBuffer, String line) { NormalRule rule = processRule(line); for (int j = 0; j < conditionsBuffer.size(); j++) { Condition condition = (Condition) conditionsBuffer.get(j); rule.addCondition(condition); } if (notesBuffer.length() > 0) rule.setNote(notesBuffer.toString()); conf.addRule(rule); } private String parseLogType(String logTypeStr, String line) { Matcher logTypeMatcher = LOG_TYPE_PATTERN.matcher(line); if (logTypeMatcher.matches()) { logTypeStr = StringUtils.trimToNull(logTypeMatcher.group(1)); if (logTypeStr != null) { logTypeStr = logTypeStr.replaceAll("\"", ""); log.debug("RewriteLog parsed as " + logTypeStr); } } return logTypeStr; } private String parseLogLevel(String logLevelStr, String line) { log.debug("found a RewriteLogLevel"); Matcher logLevelMatcher = LOG_LEVEL_PATTERN.matcher(line); if (logLevelMatcher.matches()) { int logLevel = NumberUtils.stringToInt(logLevelMatcher.group(1)); if (logLevel <= 1) logLevelStr = "FATAL"; else if (logLevel == 2) logLevelStr = "ERROR"; else if (logLevel == 3) logLevelStr = "INFO"; else if (logLevel == 4) logLevelStr = "WARN"; else if (logLevel >= 5) logLevelStr = "DEBUG"; log.debug("RewriteLogLevel parsed as " + logLevel); } else { log.error("cannot parse " + line); } return logLevelStr; } private NormalRule processRule(String line) { NormalRule rule = new NormalRule(); Matcher ruleMatcher = RULE_PATTERN.matcher(line); if (ruleMatcher.matches()) { String rulePartStr = StringUtils.trimToNull(ruleMatcher.group(1)); if (rulePartStr != null) { log.debug("got rule " + rulePartStr); String[] ruleParts = rulePartStr.split(" "); int partCounter = 0; for (int j = 0; j < ruleParts.length; j++) { String part = StringUtils.trimToNull(ruleParts[j]); if (part == null) continue; 
partCounter++; log.debug("parsed rule part " + part); if (partCounter == 1) { rule.setFrom(part); } if (partCounter == 2 && !"-".equals(part)) { rule.setTo(part); } if (part.startsWith("[") && part.endsWith("]")) { processRuleFlags(rule, part); } } } else { log.error("could not parse rule from " + line); } } else { log.error("cannot parse " + line); } return rule; } private void processRewriteEngine(Conf conf, String line) { boolean enabled = true; Matcher engineMatcher = ENGINE_PATTERN.matcher(line); if (engineMatcher.matches()) { String enabledStr = StringUtils.trim(engineMatcher.group(1)); log.debug("RewriteEngine value parsed as '" + enabledStr + "'"); if ("0".equalsIgnoreCase(enabledStr) || "false".equalsIgnoreCase(enabledStr) || "no".equalsIgnoreCase(enabledStr) || "off".equalsIgnoreCase(enabledStr)) enabled = false; log.debug("RewriteEngine as boolean '" + enabled + "'"); } else { log.error("cannot parse " + line); } conf.setEngineEnabled(enabled); } private void processRuleFlags(NormalRule rule, String part) { String rawFlags = StringUtils.trimToNull(part.substring(1, part.length() - 1)); if (rawFlags != null) { String[] flags = rawFlags.split(","); for (int k = 0; k < flags.length; k++) { String flag = flags[k]; String flagValue = null; if (flag.indexOf("=") != -1) { flagValue = flag.substring(flag.indexOf("=") + 1); flag = flag.substring(0, flag.indexOf("=")); } flag = flag.toLowerCase(); /* # 'chain|C' (chained with next rule) This flag chains the current rule with the next rule (which itself can be chained with the following rule, and so on). This has the following effect: if a rule matches, then processing continues as usual - the flag has no effect. If the rule does not match, then all following chained rules are skipped. For instance, it can be used to remove the ``.www'' part, inside a per-directory rule set, when you let an external redirect happen (where the ``.www'' part should not occur!). 
*/ if ("chain".equalsIgnoreCase(flag) || "C".equalsIgnoreCase(flag)) { log.info("chain flag [C] not supported"); } /* # 'cookie|CO=NAME:VAL:domain[:lifetime[:path]]' (set cookie) This sets a cookie in the client's browser. The cookie's name is specified by NAME and the value is VAL. The domain field is the domain of the cookie, such as '.apache.org', the optional lifetime is the lifetime of the cookie in minutes, and the optional path is the path of the cookie */ if ("cookie".equalsIgnoreCase(flag) || "CO".equalsIgnoreCase(flag)) { SetAttribute set = new SetAttribute(); set.setType("cookie"); String cookieName = flagValue; String cookieValue = null; if (flagValue != null) { int colon = flagValue.indexOf(":"); if (colon != -1) { cookieValue = flagValue.substring(colon + 1); cookieName = flagValue.substring(0, colon); } } set.setName(cookieName); // NOTE: The colon separated domain, lifetime, path are // handled by SetAttribute.setValue() set.setValue(cookieValue); rule.addSetAttribute(set); } /* # 'env|E=VAR:VAL' (set environment variable) This forces an environment variable named VAR to be set to the value VAL, where VAL can contain regexp backreferences ($N and %N) which will be expanded. You can use this flag more than once, to set more than one variable. The variables can later be dereferenced in many situations, most commonly from within XSSI (via <!--#echo var="VAR"-->) or CGI ($ENV{'VAR'}). You can also dereference the variable in a later RewriteCond pattern, using %{ENV:VAR}. Use this to strip information from URLs, while maintaining a record of that information. */ if ("env".equalsIgnoreCase(flag) || "E".equalsIgnoreCase(flag)) { log.info("env flag [E] not supported"); } /* # 'forbidden|F' (force URL to be forbidden) This forces the current URL to be forbidden - it immediately sends back a HTTP response of 403 (FORBIDDEN). Use this flag in conjunction with appropriate RewriteConds to conditionally block some URLs. 
*/ if ("forbidden".equalsIgnoreCase(flag) || "F".equalsIgnoreCase(flag)) { SetAttribute set = new SetAttribute(); set.setType("status"); set.setValue("403"); rule.addSetAttribute(set); } /* # 'gone|G' (force URL to be gone) This forces the current URL to be gone - it immediately sends back a HTTP response of 410 (GONE). Use this flag to mark pages which no longer exist as gone. */ if ("gone".equalsIgnoreCase(flag) || "G".equalsIgnoreCase(flag)) { SetAttribute set = new SetAttribute(); set.setType("status"); set.setValue("410"); rule.addSetAttribute(set); } /* # 'last|L' (last rule) Stop the rewriting process here and don't apply any more rewrite rules. This corresponds to the Perl last command or the break command in C. Use this flag to prevent the currently rewritten URL from being rewritten further by following rules. For example, use it to rewrite the root-path URL ('/') to a real one, e.g., '/e/www/'. */ if ("last".equalsIgnoreCase(flag) || "L".equalsIgnoreCase(flag)) { rule.setToLast("true"); } /* # 'next|N' (next round) Re-run the rewriting process (starting again with the first rewriting rule). This time, the URL to match is no longer the original URL, but rather the URL returned by the last rewriting rule. This corresponds to the Perl next command or the continue command in C. Use this flag to restart the rewriting process - to immediately go to the top of the loop. Be careful not to create an infinite loop! */ if ("next".equalsIgnoreCase(flag) || "N".equalsIgnoreCase(flag)) { log.info("next flag [N] not supported"); } /* # 'nocase|NC' (no case) This makes the Pattern case-insensitive, ignoring difference between 'A-Z' and 'a-z' when Pattern is matched against the current URL. */ if ("nocase".equalsIgnoreCase(flag) || "NC".equalsIgnoreCase(flag)) { rule.setFromCaseSensitive(false); } /* # 'noescape|NE' (no URI escaping of output) This flag prevents mod_rewrite from applying the usual URI escaping rules to the result of a rewrite. 
Ordinarily, special characters (such as '%', '$', ';', and so on) will be escaped into their hexcode equivalents ('%25', '%24', and '%3B', respectively); this flag prevents this from happening. This allows percent symbols to appear in the output, as in RewriteRule /foo/(.*) /bar?arg=P1\%3d$1 [R,NE] which would turn '/foo/zed' into a safe request for '/bar?arg=P1=zed'. */ if ("noescape".equalsIgnoreCase(flag) || "NE".equalsIgnoreCase(flag)) { rule.setEncodeToUrl(false); } /* # 'nosubreq|NS' ( not for internal sub-requests) This flag forces the rewrite engine to skip a rewrite rule if the current request is an internal sub-request. For instance, sub-requests occur internally in Apache when mod_include tries to find out information about possible directory default files (index.xxx). On sub-requests it is not always useful, and can even cause errors, if the complete set of rules are applied. Use this flag to exclude some rules. To decide whether or not to use this rule: if you prefix URLs with CGI-scripts, to force them to be processed by the CGI-script, it's likely that you will run into problems (or significant overhead) on sub-requests. In these cases, use this flag. */ if ("nosubreq".equalsIgnoreCase(flag) || "NS".equalsIgnoreCase(flag)) { log.info("nosubreq flag [NS] not supported"); } /* # 'proxy|P' (force proxy) This flag forces the substitution part to be internally sent as a proxy request and immediately (rewrite processing stops here) put through the proxy module. You must make sure that the substitution string is a valid URI (typically starting with http://hostname) which can be handled by the Apache proxy module. If not, you will get an error from the proxy module. Use this flag to achieve a more powerful implementation of the ProxyPass directive, to map remote content into the namespace of the local server. Note: mod_proxy must be enabled in order to use this flag. 
*/ if ("proxy".equalsIgnoreCase(flag) || "P".equalsIgnoreCase(flag)) { rule.setToType("proxy"); } /* # 'passthrough|PT' (pass through to next handler) This flag forces the rewrite engine to set the uri field of the internal request_rec structure to the value of the filename field. This flag is just a hack to enable post-processing of the output of RewriteRule directives, using Alias, ScriptAlias, Redirect, and other directives from various URI-to-filename translators. For example, to rewrite /abc to /def using mod_rewrite, and then /def to /ghi using mod_alias: RewriteRule ^/abc(.*) /def$1 [PT] Alias /def /ghi If you omit the PT flag, mod_rewrite will rewrite uri=/abc/... to filename=/def/... as a full API-compliant URI-to-filename translator should do. Then mod_alias will try to do a URI-to-filename transition, which will fail. Note: You must use this flag if you want to mix directives from different modules which allow URL-to-filename translators. The typical example is the use of mod_alias and mod_rewrite. */ if ("passthrough".equalsIgnoreCase(flag) || "PT".equalsIgnoreCase(flag)) { rule.setToType("forward"); } /* # 'qsappend|QSA' (query string append) This flag forces the rewrite engine to append a query string part of the substitution string to the existing string, instead of replacing it. Use this when you want to add more data to the query string via a rewrite rule. */ if ("qsappend".equalsIgnoreCase(flag) || "QSA".equalsIgnoreCase(flag)) { log.info("qsappend flag [QSA] not supported"); } /* # 'redirect|R [=code]' (force redirect) Prefix Substitution with http://thishost[:thisport]/ (which makes the new URL a URI) to force a external redirection. If no code is given, a HTTP response of 302 (MOVED TEMPORARILY) will be returned. If you want to use other response codes in the range 300-400, simply specify the appropriate number or use one of the following symbolic names: temp (default), permanent, seeother. 
Use this for rules to canonicalize the URL and return it to the client - to translate ``/~'' into ``/u/'', or to always append a slash to /u/user, etc. Note: When you use this flag, make sure that the substitution field is a valid URL! Otherwise, you will be redirecting to an invalid location. Remember that this flag on its own will only prepend http://thishost[:thisport]/ to the URL, and rewriting will continue. Usually, you will want to stop rewriting at this point, and redirect immediately. To stop rewriting, you should add the 'L' flag. */ if ("redirect".equalsIgnoreCase(flag) || "R".equalsIgnoreCase(flag)) { if ("301".equals(flagValue)) { rule.setToType("permanent-redirect"); } else if ("302".equals(flagValue)) { rule.setToType("temporary-redirect"); } else { rule.setToType("redirect"); } } /* # 'skip|S=num' (skip next rule(s)) This flag forces the rewriting engine to skip the next num rules in sequence, if the current rule matches. Use this to make pseudo if-then-else constructs: The last rule of the then-clause becomes skip=N, where N is the number of rules in the else-clause. (This is not the same as the 'chain|C' flag!) */ if ("skip".equalsIgnoreCase(flag) || "S".equalsIgnoreCase(flag)) { log.info("Skip flag [S] not supported"); } /* # 'type|T=MIME-type' (force MIME type) Force the MIME-type of the target file to be MIME-type. This can be used to set up the content-type based on some conditions. 
For example, the following snippet allows .php files to be displayed by mod_php if they are called with the .phps extension: */ if ("type".equalsIgnoreCase(flag) || "T".equalsIgnoreCase(flag)) { SetAttribute set = new SetAttribute(); set.setType("content-type"); set.setValue(flagValue); rule.addSetAttribute(set); } } } else { log.error("cannot parse flags from " + part); } } private Condition processRewriteCond(String rewriteCondLine) { log.debug("about to parse condition"); Condition condition = new Condition(); Matcher condMatcher = CONDITION_PATTERN.matcher(rewriteCondLine); if (condMatcher.matches()) { String conditionParts = StringUtils.trimToNull(condMatcher.group(1)); if (conditionParts != null) { String[] condParts = conditionParts.split(" "); for (int i = 0; i < condParts.length; i++) { String part = StringUtils.trimToNull(condParts[i]); if (part == null) continue; if (part.equalsIgnoreCase("%{HTTP_USER_AGENT}")) { condition.setType("header"); condition.setName("user-agent"); } else if (part.equalsIgnoreCase("%{HTTP_REFERER}")) { condition.setType("header"); condition.setName("referer"); } else if (part.equalsIgnoreCase("%{HTTP_COOKIE}")) { condition.setType("header"); condition.setName("cookie"); } else if (part.equalsIgnoreCase("%{HTTP_FORWARDED}")) { condition.setType("header"); condition.setName("forwarded"); } else if (part.equalsIgnoreCase("%{HTTP_PROXY_CONNECTION}")) { condition.setType("header"); condition.setName("proxy-connection"); } else if (part.equalsIgnoreCase("%{HTTP_ACCEPT}")) { condition.setType("header"); condition.setName("accept"); } else if (part.equalsIgnoreCase("%{HTTP_HOST}")) { condition.setType("server-name"); } else if (part.equalsIgnoreCase("%{REMOTE_ADDR}")) { condition.setType("remote-addr"); } else if (part.equalsIgnoreCase("%{REMOTE_HOST}")) { condition.setType("remote-host"); } else if (part.equalsIgnoreCase("%{REMOTE_USER}")) { condition.setType("remote-user"); } else if (part.equalsIgnoreCase("%{REQUEST_METHOD}")) { 
condition.setType("method"); } else if (part.equalsIgnoreCase("%{QUERY_STRING}")) { condition.setType("query-string"); } else if (part.equalsIgnoreCase("%{TIME_YEAR}")) { condition.setType("year"); } else if (part.equalsIgnoreCase("%{TIME_MON}")) { condition.setType("month"); } else if (part.equalsIgnoreCase("%{TIME_DAY}")) { condition.setType("dayofmonth"); } else if (part.equalsIgnoreCase("%{TIME_WDAY}")) { condition.setType("dayofweek"); } else if (part.equalsIgnoreCase("%{TIME_HOUR}")) { condition.setType("hourofday"); } else if (part.equalsIgnoreCase("%{TIME_MIN}")) { condition.setType("minute"); } else if (part.equalsIgnoreCase("%{TIME_SEC}")) { condition.setType("second"); } else if (part.equalsIgnoreCase("%{PATH_INFO}")) { condition.setType("path-info"); } else if (part.equalsIgnoreCase("%{AUTH_TYPE}")) { condition.setType("auth-type"); } else if (part.equalsIgnoreCase("%{SERVER_PORT}")) { condition.setType("port"); } else if (part.equalsIgnoreCase("%{REQUEST_URI}")) { condition.setType("request-uri"); } else if (part.equalsIgnoreCase("%{REQUEST_FILENAME}")) { condition.setType("request-filename"); } else if (part.equals("-f") || part.equals("-F")) { condition.setOperator("isfile"); } else if (part.equals("-d")) { condition.setOperator("isdir"); } else if (part.equalsIgnoreCase("-s")) { condition.setOperator("isfilewithsize"); } else if (part.equals("!-f") || part.equals("!-F")) { condition.setOperator("notfile"); } else if (part.equals("!-d")) { condition.setOperator("notdir"); } else if (part.equalsIgnoreCase("!-s")) { condition.setOperator("notfilewithsize"); //todo: bits below this comment } else if (part.equalsIgnoreCase("%{REMOTE_PORT}")) { log.error("REMOTE_PORT currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{REMOTE_IDENT}")) { log.error("REMOTE_IDENT currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SCRIPT_FILENAME}")) { log.error("SCRIPT_FILENAME currently unsupported, ignoring"); } else if 
(part.equalsIgnoreCase("%{DOCUMENT_ROOT}")) { log.error("DOCUMENT_ROOT currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SERVER_ADMIN}")) { log.error("SERVER_ADMIN currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SERVER_NAME}")) { log.error("SERVER_NAME currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SERVER_ADDR}")) { log.error("SERVER_ADDR currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SERVER_PROTOCOL}")) { log.error("SERVER_PROTOCOL currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{SERVER_SOFTWARE}")) { log.error("SERVER_SOFTWARE currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{TIME}")) { log.error("TIME currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{API_VERSION}")) { log.error("API_VERSION currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{THE_REQUEST}")) { log.error("THE_REQUEST currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{IS_SUBREQ}")) { log.error("IS_SUBREQ currently unsupported, ignoring"); } else if (part.equalsIgnoreCase("%{HTTPS}")) { log.error("HTTPS currently unsupported, ignoring"); //todo: note https in mod_rewrite means "on" in URF land it means true } else { condition.setValue(part); } } } else { log.error("could not parse condition from " + rewriteCondLine); } } else { log.error("cannot parse " + rewriteCondLine); } return condition; } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// Regenerate from the .proto below rather than editing this file by hand.
// source: google/container/v1/cluster_service.proto

package com.google.container.v1;

/**
 *
 *
 * <pre>
 * Configuration for NetworkPolicy. This only tracks whether the addon
 * is enabled or not on the Master, it does not track whether network policy
 * is enabled for the nodes.
 * </pre>
 *
 * Protobuf type {@code google.container.v1.NetworkPolicyConfig}
 */
public final class NetworkPolicyConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.container.v1.NetworkPolicyConfig)
    NetworkPolicyConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use NetworkPolicyConfig.newBuilder() to construct.
  private NetworkPolicyConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private NetworkPolicyConfig() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new NetworkPolicyConfig();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor; unrecognised fields are preserved in
  // unknownFields so round-tripping does not lose data.
  private NetworkPolicyConfig(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8: // field 1 (disabled), varint-encoded bool
            {
              disabled_ = input.readBool();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_NetworkPolicyConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_NetworkPolicyConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.NetworkPolicyConfig.class,
            com.google.container.v1.NetworkPolicyConfig.Builder.class);
  }

  public static final int DISABLED_FIELD_NUMBER = 1;
  private boolean disabled_;
  /**
   *
   *
   * <pre>
   * Whether NetworkPolicy is enabled for this cluster.
   * </pre>
   *
   * <code>bool disabled = 1;</code>
   *
   * @return The disabled.
   */
  @java.lang.Override
  public boolean getDisabled() {
    return disabled_;
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Default (false) values are omitted from the wire, per proto3 semantics.
    if (disabled_ != false) {
      output.writeBool(1, disabled_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (disabled_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, disabled_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1.NetworkPolicyConfig)) {
      return super.equals(obj);
    }
    com.google.container.v1.NetworkPolicyConfig other =
        (com.google.container.v1.NetworkPolicyConfig) obj;

    if (getDisabled() != other.getDisabled()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DISABLED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDisabled());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.NetworkPolicyConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.container.v1.NetworkPolicyConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Configuration for NetworkPolicy. This only tracks whether the addon
   * is enabled or not on the Master, it does not track whether network policy
   * is enabled for the nodes.
   * </pre>
   *
   * Protobuf type {@code google.container.v1.NetworkPolicyConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1.NetworkPolicyConfig)
      com.google.container.v1.NetworkPolicyConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_NetworkPolicyConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_NetworkPolicyConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1.NetworkPolicyConfig.class,
              com.google.container.v1.NetworkPolicyConfig.Builder.class);
    }

    // Construct using com.google.container.v1.NetworkPolicyConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No repeated/message fields, so nothing to force-initialize.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      disabled_ = false;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_NetworkPolicyConfig_descriptor;
    }

    @java.lang.Override
    public com.google.container.v1.NetworkPolicyConfig getDefaultInstanceForType() {
      return com.google.container.v1.NetworkPolicyConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.container.v1.NetworkPolicyConfig build() {
      com.google.container.v1.NetworkPolicyConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.container.v1.NetworkPolicyConfig buildPartial() {
      com.google.container.v1.NetworkPolicyConfig result =
          new com.google.container.v1.NetworkPolicyConfig(this);
      result.disabled_ = disabled_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1.NetworkPolicyConfig) {
        return mergeFrom((com.google.container.v1.NetworkPolicyConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.container.v1.NetworkPolicyConfig other) {
      if (other == com.google.container.v1.NetworkPolicyConfig.getDefaultInstance()) return this;
      // proto3: only non-default values overwrite during merge.
      if (other.getDisabled() != false) {
        setDisabled(other.getDisabled());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.container.v1.NetworkPolicyConfig parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow the cause.
        parsedMessage = (com.google.container.v1.NetworkPolicyConfig) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private boolean disabled_;
    /**
     *
     *
     * <pre>
     * Whether NetworkPolicy is enabled for this cluster.
     * </pre>
     *
     * <code>bool disabled = 1;</code>
     *
     * @return The disabled.
     */
    @java.lang.Override
    public boolean getDisabled() {
      return disabled_;
    }
    /**
     *
     *
     * <pre>
     * Whether NetworkPolicy is enabled for this cluster.
     * </pre>
     *
     * <code>bool disabled = 1;</code>
     *
     * @param value The disabled to set.
     * @return This builder for chaining.
     */
    public Builder setDisabled(boolean value) {

      disabled_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Whether NetworkPolicy is enabled for this cluster.
     * </pre>
     *
     * <code>bool disabled = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDisabled() {

      disabled_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.container.v1.NetworkPolicyConfig)
  }

  // @@protoc_insertion_point(class_scope:google.container.v1.NetworkPolicyConfig)
  private static final com.google.container.v1.NetworkPolicyConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1.NetworkPolicyConfig();
  }

  public static com.google.container.v1.NetworkPolicyConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<NetworkPolicyConfig> PARSER =
      new com.google.protobuf.AbstractParser<NetworkPolicyConfig>() {
        @java.lang.Override
        public NetworkPolicyConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new NetworkPolicyConfig(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<NetworkPolicyConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<NetworkPolicyConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1.NetworkPolicyConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.audiotoolbox;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSFastEnumeration;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.ReferenceInfo;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.Ptr;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCObject;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * AUAudioUnitBusArray
 * <p>
 * Container for an audio unit's input or output busses.
 * <p>
 * Hosts can observe a bus property across all busses by using KVO on this object, without
 * having to observe it on each individual bus. (One could add listeners to individual busses,
 * but that means one has to observe bus count changes and add/remove listeners in response.
 * Also, NSArray's addObserver:toObjectsAtIndexes:forKeyPath:options:context: is problematic;
 * it does not let the individual objects override the observation request, and so a bus which
 * is proxying a bus in an extension process does not get the message.)
 * <p>
 * Some audio units (e.g. mixers) support variable numbers of busses, via subclassing. When the
 * bus count changes, a KVO notification is sent on "inputBusses" or "outputBusses," as
 * appropriate.
 * <p>
 * Subclassers should see also the AUAudioUnitBusImplementation category.
 * <p>
 * The bus array is bridged to the v2 property kAudioUnitProperty_ElementCount.
 * <p>
 * NOTE(review): this is machine-generated MOE/natj binding code (every member is marked
 * {@code @Generated} and mapped to an Objective-C selector via {@code @Selector}); do not
 * hand-edit method bodies or signatures — regenerate the binding instead.
 */
@Generated
@Library("AudioToolbox")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class AUAudioUnitBusArray extends NSObject implements NSFastEnumeration {
    static {
        // Registers this class's native bindings with the natj runtime at load time.
        NatJ.register();
    }

    @Generated
    protected AUAudioUnitBusArray(Pointer peer) {
        super(peer);
    }

    // --- Standard NSObject class-method bindings (boilerplate emitted for every bound class) ---

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native AUAudioUnitBusArray alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native AUAudioUnitBusArray allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(
            @Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native AUAudioUnitBusArray new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    // --- AUAudioUnitBusArray-specific instance-method bindings ---

    /**
     * addObserverToAllBusses:forKeyPath:options:context:
     * <p>
     * Add a KVO observer for a property on all busses in the array.
     */
    @Generated
    @Selector("addObserverToAllBusses:forKeyPath:options:context:")
    public native void addObserverToAllBussesForKeyPathOptionsContext(NSObject observer, String keyPath,
            @NUInt long options, VoidPtr context);

    /**
     * Which bus array this is (input or output).
     */
    @Generated
    @Selector("busType")
    @NInt
    public native long busType();

    /**
     * [@property] count
     */
    @Generated
    @Selector("count")
    @NUInt
    public native long count();

    // NSFastEnumeration support (for-in style iteration over busses on the ObjC side).
    @Generated
    @Selector("countByEnumeratingWithState:objects:count:")
    @NUInt
    public native long countByEnumeratingWithStateObjectsCount(VoidPtr state,
            @ReferenceInfo(type = ObjCObject.class) Ptr<ObjCObject> buffer, @NUInt long len);

    @Generated
    @Selector("init")
    public native AUAudioUnitBusArray init();

    /**
     * initWithAudioUnit:busType:
     * <p>
     * Initializes an empty bus array.
     */
    @Generated
    @Selector("initWithAudioUnit:busType:")
    public native AUAudioUnitBusArray initWithAudioUnitBusType(AUAudioUnit owner, @NInt long busType);

    /**
     * initWithAudioUnit:busType:busses:
     * <p>
     * Initializes by making a copy of the supplied bus array.
     */
    @Generated
    @Selector("initWithAudioUnit:busType:busses:")
    public native AUAudioUnitBusArray initWithAudioUnitBusTypeBusses(AUAudioUnit owner, @NInt long busType,
            NSArray<? extends AUAudioUnitBus> busArray);

    /**
     * [@property] countChangeable
     * <p>
     * Whether the array can have a variable number of busses.
     * <p>
     * The base implementation returns false.
     */
    @Generated
    @Selector("isCountChangeable")
    public native boolean isCountChangeable();

    /**
     * objectAtIndexedSubscript:
     */
    @Generated
    @Selector("objectAtIndexedSubscript:")
    public native AUAudioUnitBus objectAtIndexedSubscript(@NUInt long index);

    /**
     * The audio unit that owns the bus.
     */
    @Generated
    @Selector("ownerAudioUnit")
    public native AUAudioUnit ownerAudioUnit();

    /**
     * removeObserverFromAllBusses:forKeyPath:context:
     * <p>
     * Remove a KVO observer for a property on all busses in the array.
     */
    @Generated
    @Selector("removeObserverFromAllBusses:forKeyPath:context:")
    public native void removeObserverFromAllBussesForKeyPathContext(NSObject observer, String keyPath,
            VoidPtr context);

    /**
     * Sets the bus array to be a copy of the supplied array. The base class issues KVO notifications.
     */
    @Generated
    @Selector("replaceBusses:")
    public native void replaceBusses(NSArray<? extends AUAudioUnitBus> busArray);

    /**
     * [@property] setBusCount:error:
     * <p>
     * Change the number of busses in the array.
     */
    @Generated
    @Selector("setBusCount:error:")
    public native boolean setBusCountError(@NUInt long count,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> outError);
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.engine;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.AssertingIndexSearcher;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESIntegTestCase;

import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.IdentityHashMap;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Support class to build MockEngines like {@link org.elasticsearch.test.engine.MockInternalEngine}
 * or {@link org.elasticsearch.test.engine.MockShadowEngine} since they need to subclass the actual
 * engine.
 * <p>
 * Provides three test-only behaviors, all driven by a seeded {@link Random}:
 * <ul>
 *   <li>randomly wrapping {@link DirectoryReader}s (e.g. with {@link AssertingDirectoryReader}),</li>
 *   <li>randomly choosing flush-then-close vs. plain close on engine shutdown,</li>
 *   <li>tracking acquired searchers and failing the suite if any were never released.</li>
 * </ul>
 */
public final class MockEngineSupport {

    /** Setting: probability [0..1] of wrapping readers; 0 by default (see constructor comment). */
    public static final String WRAP_READER_RATIO = "index.engine.mock.random.wrap_reader_ratio";
    /** Setting: class name of the {@link FilterDirectoryReader} used to wrap readers. */
    public static final String READER_WRAPPER_TYPE = "index.engine.mock.random.wrapper";
    /** Setting: probability [0..1] of flushing before close; defaults to 0.5. */
    public static final String FLUSH_ON_CLOSE_RATIO = "index.engine.mock.flush_on_close.ratio";

    // Flips to true on the first flushOrClose call; subsequent calls keep the caller's action.
    private final AtomicBoolean closing = new AtomicBoolean(false);
    private final ESLogger logger = Loggers.getLogger(Engine.class);
    private final ShardId shardId;
    private final QueryCache filterCache;
    private final QueryCachingPolicy filterCachingPolicy;
    private final SearcherCloseable searcherCloseable;
    private final MockContext mockContext;

    /** Immutable bundle of the randomization knobs derived from the index settings. */
    public static class MockContext {
        private final Random random;
        private final boolean wrapReader;
        private final Class<? extends FilterDirectoryReader> wrapper;
        private final Settings indexSettings;
        private final double flushOnClose;

        public MockContext(Random random, boolean wrapReader, Class<? extends FilterDirectoryReader> wrapper, Settings indexSettings) {
            this.random = random;
            this.wrapReader = wrapReader;
            this.wrapper = wrapper;
            this.indexSettings = indexSettings;
            flushOnClose = indexSettings.getAsDouble(FLUSH_ON_CLOSE_RATIO, 0.5d);
        }
    }

    /**
     * Reads the mock settings from the engine config and seeds the RNG from
     * {@link ESIntegTestCase#SETTING_INDEX_SEED} so runs are reproducible per index.
     */
    public MockEngineSupport(EngineConfig config) {
        Settings indexSettings = config.getIndexSettings();
        shardId = config.getShardId();
        filterCache = config.getQueryCache();
        filterCachingPolicy = config.getQueryCachingPolicy();
        final long seed = indexSettings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l);
        Random random = new Random(seed);
        final double ratio = indexSettings.getAsDouble(WRAP_READER_RATIO, 0.0d); // DISABLED by default - AssertingDR is crazy slow
        Class<? extends AssertingDirectoryReader> wrapper = indexSettings.getAsClass(READER_WRAPPER_TYPE, AssertingDirectoryReader.class);
        boolean wrapReader = random.nextDouble() < ratio;
        if (logger.isTraceEnabled()) {
            logger.trace("Using [{}] for shard [{}] seed: [{}] wrapReader: [{}]", this.getClass().getName(), shardId, seed, wrapReader);
        }
        mockContext = new MockContext(random, wrapReader, wrapper, indexSettings);
        this.searcherCloseable = new SearcherCloseable();
        LuceneTestCase.closeAfterSuite(searcherCloseable); // only one suite closeable per Engine
    }

    /** Action the owning engine should take when it is being closed. */
    enum CloseAction {
        FLUSH_AND_CLOSE,
        CLOSE;
    }

    /**
     * Returns the CloseAction to execute on the actual engine. Note this method changes the state on
     * the first call and treats subsequent calls as if the engine passed is already closed.
     */
    public CloseAction flushOrClose(Engine engine, CloseAction originalAction) throws IOException {
        if (closing.compareAndSet(false, true)) { // only do the random thing if we are the first call to this since super.flushOnClose() calls #close() again and then we might end up with a stackoverflow.
            if (mockContext.flushOnClose > mockContext.random.nextDouble()) {
                return CloseAction.FLUSH_AND_CLOSE;
            } else {
                return CloseAction.CLOSE;
            }
        } else {
            return originalAction;
        }
    }

    /**
     * Builds an {@link AssertingIndexSearcher} over the given searcher's reader, optionally
     * wrapping the reader first (when the mock context says so), and copies over the similarity,
     * query cache and caching policy from the engine config.
     */
    public AssertingIndexSearcher newSearcher(String source, IndexSearcher searcher, SearcherManager manager) throws EngineException {
        IndexReader reader = searcher.getIndexReader();
        IndexReader wrappedReader = reader;
        assert reader != null;
        if (reader instanceof DirectoryReader && mockContext.wrapReader) {
            wrappedReader = wrapReader((DirectoryReader) reader);
        }
        // this executes basic query checks and asserts that weights are normalized only once etc.
        final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader);
        assertingIndexSearcher.setSimilarity(searcher.getSimilarity(true));
        assertingIndexSearcher.setQueryCache(filterCache);
        assertingIndexSearcher.setQueryCachingPolicy(filterCachingPolicy);
        return assertingIndexSearcher;
    }

    /**
     * Wraps the reader with the configured wrapper class via reflection. Prefers a
     * (DirectoryReader, Settings) constructor, falls back to a (DirectoryReader) one, and
     * returns the reader unchanged if neither exists.
     */
    private DirectoryReader wrapReader(DirectoryReader reader) {
        try {
            Constructor<?>[] constructors = mockContext.wrapper.getConstructors();
            Constructor<?> nonRandom = null;
            for (Constructor<?> constructor : constructors) {
                Class<?>[] parameterTypes = constructor.getParameterTypes();
                if (parameterTypes.length > 0 && parameterTypes[0] == DirectoryReader.class) {
                    if (parameterTypes.length == 1) {
                        nonRandom = constructor;
                    } else if (parameterTypes.length == 2 && parameterTypes[1] == Settings.class) {
                        return (DirectoryReader) constructor.newInstance(reader, mockContext.indexSettings);
                    }
                }
            }
            if (nonRandom != null) {
                return (DirectoryReader) nonRandom.newInstance(reader);
            }
        } catch (Exception e) {
            throw new ElasticsearchException("Can not wrap reader", e);
        }
        return reader;
    }

    /**
     * Base class for reader wrappers used with {@link #wrapReader(DirectoryReader)}; delegates
     * the cache keys to the wrapped reader so caching behaves as if unwrapped.
     */
    public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader {
        protected final SubReaderWrapper subReaderWrapper;

        public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) throws IOException {
            super(in, subReaderWrapper);
            this.subReaderWrapper = subReaderWrapper;
        }

        @Override
        public Object getCoreCacheKey() {
            return in.getCoreCacheKey();
        }

        @Override
        public Object getCombinedCoreAndDeletesKey() {
            return in.getCombinedCoreAndDeletesKey();
        }
    }

    /**
     * Wraps an engine searcher in an {@link AssertingSearcher} that unregisters itself from the
     * leak tracker on close. The original (unwrapped) engine searcher is the one that gets
     * released, never a wrapped reader.
     */
    public Engine.Searcher wrapSearcher(String source, Engine.Searcher engineSearcher, IndexSearcher searcher, SearcherManager manager) {
        final AssertingIndexSearcher assertingIndexSearcher = newSearcher(source, searcher, manager);
        assertingIndexSearcher.setSimilarity(searcher.getSimilarity(true));
        // pass the original searcher to the super.newSearcher() method to make sure this is the searcher that will
        // be released later on. If we wrap an index reader here must not pass the wrapped version to the manager
        // on release otherwise the reader will be closed too early. - good news, stuff will fail all over the place
        // if we don't get this right here
        AssertingSearcher assertingSearcher = new AssertingSearcher(assertingIndexSearcher, engineSearcher, shardId, logger) {
            @Override
            public void close() {
                try {
                    searcherCloseable.remove(this);
                } finally {
                    super.close();
                }
            }
        };
        searcherCloseable.add(assertingSearcher, engineSearcher.source());
        return assertingSearcher;
    }

    /**
     * Suite-scoped leak tracker: records a stack trace (as a RuntimeException) per acquired
     * searcher and, at suite close, throws an AssertionError with all unreleased acquisition
     * traces attached as suppressed exceptions.
     */
    private static final class SearcherCloseable implements Closeable {

        // Identity map: distinct searcher instances may otherwise compare equal.
        private final IdentityHashMap<AssertingSearcher, RuntimeException> openSearchers = new IdentityHashMap<>();

        @Override
        public synchronized void close() throws IOException {
            if (openSearchers.isEmpty() == false) {
                AssertionError error = new AssertionError("Unreleased searchers found");
                for (RuntimeException ex : openSearchers.values()) {
                    error.addSuppressed(ex);
                }
                throw error;
            }
        }

        void add(AssertingSearcher searcher, String source) {
            // Created here (not when leaked) so the trace points at the acquisition site.
            final RuntimeException ex = new RuntimeException("Unreleased Searcher, source [" + source + "]");
            synchronized (this) {
                openSearchers.put(searcher, ex);
            }
        }

        synchronized void remove(AssertingSearcher searcher) {
            openSearchers.remove(searcher);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.html.form.upload;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.fileupload2.FileItem;
import org.apache.wicket.Session;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.util.file.Files;
import org.apache.wicket.util.io.IOUtils;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.string.Strings;

/**
 * Model for file uploads. Objects of this class should not be kept between requests, and should
 * therefore be marked as <code>transient</code> if they become a property of an IModel.
 *
 * @author Jonathan Locke
 */
public class FileUpload
{
	private static final long serialVersionUID = 1L;

	/** The underlying multipart item; never null (validated in the constructor). */
	private final FileItem item;

	/** Streams handed out by {@link #getInputStream()}; closed in {@link #closeStreams()}. */
	private transient List<InputStream> inputStreamsToClose;

	/**
	 * Constructor
	 *
	 * @param item
	 *            The uploaded file item; must not be null
	 */
	public FileUpload(final FileItem item)
	{
		Args.notNull(item, "item");
		this.item = item;
	}

	/**
	 * Close the streams which has been opened when getting the InputStream using
	 * {@link #getInputStream()}. All the input streams are closed at the end of the request. This
	 * is done when the FileUploadField, which is associated with this FileUpload is detached.
	 * <p>
	 * If an exception is thrown when closing the input streams, we ignore it, because the stream
	 * might have been closed already.
	 */
	public final void closeStreams()
	{
		if (inputStreamsToClose != null)
		{
			for (InputStream inputStream : inputStreamsToClose)
			{
				IOUtils.closeQuietly(inputStream);
			}

			// Reset the list
			inputStreamsToClose = null;
		}
	}

	/**
	 * Deletes temp file from disk
	 */
	public void delete()
	{
		item.delete();
	}

	/**
	 * @return Uploaded file as an array of bytes
	 */
	public byte[] getBytes()
	{
		return item.get();
	}

	/**
	 * Get the cryptographic digest of the uploaded file, computed with the given algorithm,
	 * e.g. MD5, SHA-1, SHA-256, SHA-512.
	 *
	 * @param algorithm
	 *            the digest algorithm, e.g. MD5, SHA-1, SHA-256, SHA-512
	 *
	 * @return The cryptographic digest of the file
	 */
	public byte[] getDigest(String algorithm)
	{
		try
		{
			Args.notEmpty(algorithm, "algorithm");
			MessageDigest digest = java.security.MessageDigest.getInstance(algorithm);

			if (item.isInMemory())
			{
				digest.update(getBytes());
				return digest.digest();
			}

			InputStream in = null;

			try
			{
				in = item.getInputStream();
				// Cap the buffer at 40 KiB, but never allocate a zero-length buffer:
				// InputStream#read(byte[]) returns 0 (not -1) for an empty buffer, so a
				// zero-byte on-disk upload would previously loop forever here.
				byte[] buf = new byte[(int)Math.max(1L, Math.min(item.getSize(), 4096L * 10))];
				int len;
				while (-1 != (len = in.read(buf)))
				{
					digest.update(buf, 0, len);
				}
				return digest.digest();
			}
			catch (IOException ex)
			{
				throw new WicketRuntimeException("Error while reading input data for " + algorithm +
					" checksum", ex);
			}
			finally
			{
				IOUtils.closeQuietly(in);
			}
		}
		catch (NoSuchAlgorithmException ex)
		{
			String error = String.format(
				"Your java runtime does not support digest algorithm [%s]. " +
					"Please see java.security.MessageDigest.getInstance(\"%s\")", algorithm,
				algorithm);
			throw new WicketRuntimeException(error, ex);
		}
	}

	/**
	 * Get the MD5 checksum.
	 *
	 * @return The MD5 checksum of the file
	 */
	public byte[] getMD5()
	{
		return getDigest("MD5");
	}

	/**
	 * @since 1.2
	 * @return name of uploaded client side file
	 */
	public String getClientFileName()
	{
		String name = item.getName();

		// when uploading from localhost some browsers will specify the entire path, we strip it
		// down to just the file name
		name = Strings.lastPathComponent(name, '/');
		name = Strings.lastPathComponent(name, '\\');

		return name;
	}

	/**
	 * @return Content type for upload
	 */
	public String getContentType()
	{
		return item.getContentType();
	}

	/**
	 * Get an input stream for the file uploaded. Use this input stream if you can't use
	 * {@link #writeTo(File)} for persisting the uploaded file. This can be if you need to react
	 * upon the content of the file or need to persist it elsewhere, i.e. a database or external
	 * filesystem.
	 * <p>
	 * <b>PLEASE NOTE!</b><br>
	 * The InputStream return will be closed be Wicket at the end of the request. If you need it
	 * across a request you need to hold on to this FileUpload instead.
	 *
	 * @return Input stream with file contents.
	 * @throws IOException
	 */
	public InputStream getInputStream() throws IOException
	{
		if (inputStreamsToClose == null)
		{
			inputStreamsToClose = new ArrayList<InputStream>();
		}

		InputStream is = item.getInputStream();
		inputStreamsToClose.add(is);

		return is;
	}

	/**
	 * @return The upload's size
	 */
	public long getSize()
	{
		return item.getSize();
	}

	/**
	 * Saves this file upload to a given file on the server side.
	 *
	 * @param file
	 *            The file
	 * @throws Exception
	 */
	public void writeTo(final File file) throws Exception
	{
		// Remove any stale target first so item.write() starts clean.
		Files.remove(file);
		item.write(file);
	}

	/**
	 * Convenience method that copies the input stream returned by {@link #getInputStream()} into a
	 * temporary file.
	 * <p>
	 * Only use this if you actually need a {@link File} to work with, in all other cases use
	 * {@link #getInputStream()} or {@link #getBytes()}
	 *
	 * @since 1.2
	 *
	 * @return temporary file containing the contents of the uploaded file
	 * @throws Exception
	 */
	public final File writeToTempFile() throws Exception
	{
		// Session id + request start time keep concurrent uploads from colliding.
		String sessionId = Session.exists() ? Session.get().getId() : "";
		String tempFileName = sessionId + "_" + RequestCycle.get().getStartTime();
		File temp = File.createTempFile(tempFileName, Files.cleanupFilename(item.getFieldName()));
		writeTo(temp);
		return temp;
	}
}
package water.parser; import org.junit.*; import water.*; import water.fvec.*; import water.util.Log; import java.io.File; public class ParserTest extends TestUtil { @BeforeClass static public void setup() { stall_till_cloudsize(1); } private final double NaN = Double.NaN; private final char[] SEPARATORS = new char[] {',', ' '}; // Make a ByteVec with the specific Chunks public static Key makeByteVec(String... data) { Futures fs = new Futures(); long[] espc = new long[data.length+1]; for( int i = 0; i < data.length; ++i ) espc[i+1] = espc[i]+data[i].length(); Key k = Vec.newKey(); ByteVec bv = new ByteVec(k,espc); DKV.put(k,bv,fs); for( int i = 0; i < data.length; ++i ) { Key ck = bv.chunkKey(i); DKV.put(ck, new Value(ck,new C1NChunk(data[i].getBytes())),fs); } fs.blockForPending(); return k; } public static boolean compareDoubles(double a, double b, double threshold) { if( a==b ) return true; if( ( Double.isNaN(a) && !Double.isNaN(b)) || (!Double.isNaN(a) && Double.isNaN(b)) ) return false; return !Double.isInfinite(a) && !Double.isInfinite(b) && Math.abs(a-b)/Math.max(Math.abs(a),Math.abs(b)) < threshold; } private static void testParsed(Key k, double[][] expected) { testParsed(k,expected, expected.length); } private static void testParsed(Key k, double[][] expected, int len) { Frame fr = DKV.get(k).get(); testParsed(fr,expected,len); } static void testParsed(Frame fr, double[][] expected, int len) { Assert.assertEquals(len,fr.numRows()); Assert.assertEquals(expected[0].length,fr.numCols()); for( int j = 0; j < fr.numCols(); ++j ) { Vec vec = fr.vecs()[j]; for( int i = 0; i < expected.length; ++i ) { double pval = vec.at(i); if( Double.isNaN(expected[i][j]) ) Assert.assertTrue(i+" -- "+j, vec.isNA(i)); else Assert.assertTrue(expected[i][j]+" -- "+pval,compareDoubles(expected[i][j],pval,0.0000001)); } } fr.delete(); } @Test public void testBasic() { String[] data = new String[] { "1|2|3\n1|2|3", "4|5|6", "4|5.2|", "asdf|qwer|1", "1.1", "1.1|2.1|3.4", }; double[][] 
exp = new double[][] { ard(1.0, 2.0, 3.0), ard(1.0, 2.0, 3.0), ard(4.0, 5.0, 6.0), ard(4.0, 5.2, NaN), ard(NaN, NaN, 1.0), ard(1.1, NaN, NaN), ard(1.1, 2.1, 3.4), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); StringBuilder sb1 = new StringBuilder(); for( String ds : dataset ) sb1.append(ds).append("\n"); Key k1 = makeByteVec(sb1.toString()); Key r1 = Key.make("r1"); ParseDataset.parse(r1, k1); testParsed(r1,exp); StringBuilder sb2 = new StringBuilder(); for( String ds : dataset ) sb2.append(ds).append("\r\n"); Key k2 = makeByteVec(sb2.toString()); Key r2 = Key.make("r2"); ParseDataset.parse(r2, k2); testParsed(r2,exp); } } @Test public void testChunkBoundaries() { String[] data = new String[] { "1|2|3\n1|2|3\n", "1|2|3\n1|2", "|3\n1|1|1\n", "2|2|2\n2|3|", "4\n3|3|3\n", "3|4|5\n5", ".5|2|3\n5.","5|2|3\n55e-","1|2.0|3.0\n55e","-1|2.0|3.0\n55","e-1|2.0|3.0\n" }; double[][] exp = new double[][] { ard(1.0, 2.0, 3.0), ard(1.0, 2.0, 3.0), ard(1.0, 2.0, 3.0), ard(1.0, 2.0, 3.0), ard(1.0, 1.0, 1.0), ard(2.0, 2.0, 2.0), ard(2.0, 3.0, 4.0), ard(3.0, 3.0, 3.0), ard(3.0, 4.0, 5.0), ard(5.5, 2.0, 3.0), ard(5.5, 2.0, 3.0), ard(5.5, 2.0, 3.0), ard(5.5, 2.0, 3.0), ard(5.5, 2.0, 3.0), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); Key k = makeByteVec(dataset); Key r3 = Key.make(); ParseDataset.parse(r3, k); testParsed(r3,exp); } } @Test public void testChunkBoundariesMixedLineEndings() { String[] data = new String[] { "1|2|3\n4|5|6\n7|8|9", "\r\n10|11|12\n13|14|15", "\n16|17|18\r", "\n19|20|21\n", "22|23|24\n25|26|27\r\n", "28|29|30" }; double[][] exp = new double[][] { ard(1, 2, 3), ard(4, 5, 6), ard(7, 8, 9), ard(10, 11, 12), ard(13, 14, 15), ard(16, 17, 18), ard(19, 20, 21), ard(22, 23, 24), ard(25, 26, 27), ard(28, 29, 30), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); Key k = makeByteVec(dataset); Key r4 = Key.make(); 
ParseDataset.parse(r4, k); testParsed(r4,exp); } } @Test public void testNondecimalColumns() { String data[] = { "1| 2|one\n" + "3| 4|two\n" + "5| 6|three\n" + "7| 8|one\n" + "9| 10|two\n" + "11|12|three\n" + "13|14|one\n" + "15|16|\"two\"\n" + "17|18|\" four\"\n" + "19|20| three\n", }; double[][] expDouble = new double[][] { ard(1, 2, 1), // preserve order ard(3, 4, 3), ard(5, 6, 2), ard(7, 8, 1), ard(9, 10, 3), ard(11,12, 2), ard(13,14, 1), ard(15,16, 3), ard(17,18, 0), ard(19,20, 2), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); Key key = makeByteVec(dataset); Key r = Key.make(); ParseDataset.parse(r, key); Frame fr = DKV.get(r).get(); String[] cd = fr.vecs()[2].domain(); Assert.assertEquals(" four",cd[0]); Assert.assertEquals("one",cd[1]); Assert.assertEquals("three",cd[2]); Assert.assertEquals("two",cd[3]); testParsed(r, expDouble); } } @Test public void testSingleEntryDatasets() { String[] numericDataset = new String[]{"10.9533122476"}; Key k1 = makeByteVec(numericDataset); Key r1 = Key.make(); ParseDataset.parse(r1, k1); Frame fr1 = DKV.get(r1).get(); Assert.assertTrue(fr1.vecs()[0].isNumeric()); Assert.assertTrue(fr1.numCols() == 1); Assert.assertTrue(fr1.numRows() == 1); fr1.delete(); String[] dateDataset = new String[]{"3-Jan-06"}; Key k2 = makeByteVec(dateDataset); Key r2 = Key.make(); ParseDataset.parse(r2, k2); Frame fr2 = DKV.get(r2).get(); Assert.assertTrue(fr2.vecs()[0].isTime()); Assert.assertTrue(fr2.numCols() == 1); Assert.assertTrue(fr2.numRows() == 1); fr2.delete(); String[] UUIDDataset = new String[]{"9ff4ed3a-6b00-4130-9aca-2ed897305fd1"}; Key k3 = makeByteVec(UUIDDataset); Key r3 = Key.make(); ParseDataset.parse(r3, k3); Frame fr3 = DKV.get(r3).get(); Assert.assertTrue(fr3.numCols() == 1); Assert.assertTrue(fr3.numRows() == 1); Assert.assertTrue(fr3.vecs()[0].isUUID()); fr3.delete(); String[] enumDataset = new String[]{"Foo-bar"}; Key k4 = makeByteVec(enumDataset); Key r4 = Key.make(); 
ParseDataset.parse(r4, k4); Frame fr4 = DKV.get(r4).get(); Assert.assertTrue(fr4.numCols() == 1); Assert.assertTrue(fr4.numRows() == 1); Assert.assertTrue(fr4.vecs()[0].isEnum()); String[] dom = fr4.vecs()[0].domain(); Assert.assertTrue(dom.length == 1); Assert.assertEquals("Foo-bar", dom[0]); fr4.delete(); } @Test public void testNumberFormats(){ String [] data = {"+.6e102|+.7e102|+.8e102\n.6e102|.7e102|.8e102\n"}; double[][] expDouble = new double[][] { ard(+.6e102,.7e102,.8e102), // preserve order ard(+.6e102, +.7e102,+.8e102), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); Key key = makeByteVec(dataset); Key r = Key.make(); ParseDataset.parse(r, key); testParsed(r, expDouble); } } @Test public void testMultipleNondecimalColumns() { String data[] = { "foo| 2|one\n" + "bar| 4|two\n" + "foo| 6|three\n" + "bar| 8|one\n" + "bar|ten|two\n" + "bar| 12|three\n" + "foobar|14|one\n", }; double[][] expDouble = new double[][] { ard(1, 2, 0), // preserve order ard(0, 4, 2), ard(1, 6, 1), ard(0, 8, 0), ard(0, NaN, 2), ard(0, 12, 1), ard(2, 14, 0), }; for (char separator : SEPARATORS) { String[] dataset = getDataForSeparator(separator, data); Key key = makeByteVec(dataset); Key r = Key.make(); ParseDataset.parse(r, key); Frame fr = DKV.get(r).get(); String[] cd = fr.vecs()[2].domain(); Assert.assertEquals("one",cd[0]); Assert.assertEquals("three",cd[1]); Assert.assertEquals("two",cd[2]); cd = fr.vecs()[0].domain(); Assert.assertEquals("bar",cd[0]); Assert.assertEquals("foo",cd[1]); Assert.assertEquals("foobar",cd[2]); testParsed(r, expDouble); } } // Test if the empty column is correctly handled. 
// NOTE: this test makes sense only for comma separated columns
  @Test public void testEmptyColumnValues() {
    String data[] = {
        "1,2,3,foo\n"
      + "4,5,6,bar\n"
      + "7,,8,\n"
      + ",9,10\n"
      + "11,,,\n"
      + "0,0,0,z\n"
      + "0,0,0,z\n"
      + "0,0,0,z\n"
      + "0,0,0,z\n"
      + "0,0,0,z\n"
    };
    // Empty cells parse as NaN; the string column's domain is {bar, foo, z}.
    double[][] expDouble = new double[][] {
      ard(1, 2, 3, 1),
      ard(4, 5, 6, 0),
      ard(7, NaN, 8, NaN),
      ard(NaN, 9, 10, NaN),
      ard(11, NaN, NaN, NaN),
      ard(0, 0, 0, 2),
      ard(0, 0, 0, 2),
      ard(0, 0, 0, 2),
      ard(0, 0, 0, 2),
      ard(0, 0, 0, 2),
    };
    final char separator = ',';
    String[] dataset = getDataForSeparator(separator, data);
    Key key = makeByteVec(dataset);
    Key r = Key.make();
    ParseDataset.parse(r, key);
    Frame fr = DKV.get(r).get();
    String[] cd = fr.vecs()[3].domain();
    Assert.assertEquals("bar",cd[0]);
    Assert.assertEquals("foo",cd[1]);
    testParsed(r, expDouble);
  }

  // Leading/trailing/surrounding spaces around the separator must be ignored;
  // short rows pad with NaN, and non-numeric tokens in numeric columns are NaN.
  @Test public void testBasicSpaceAsSeparator() {
    String[] data = new String[] {
      " 1|2|3",
      " 4 | 5 | 6",
      "4|5.2 ",
      "asdf|qwer|1",
      "1.1",
      "1.1|2.1|3.4",
    };
    double[][] exp = new double[][] {
      ard(1.0, 2.0, 3.0),
      ard(4.0, 5.0, 6.0),
      ard(4.0, 5.2, NaN),
      ard(NaN, NaN, 1.0),
      ard(1.1, NaN, NaN),
      ard(1.1, 2.1, 3.4),
    };
    for (char separator : SEPARATORS) {
      String[] dataset = getDataForSeparator(separator, data);
      StringBuilder sb = new StringBuilder();
      for( String ds : dataset ) sb.append(ds).append("\n");
      Key k = makeByteVec(sb.toString());
      Key r5 = Key.make();
      ParseDataset.parse(r5, k);
      testParsed(r5, exp);
    }
  }

  // Rewrites the test fixtures, replacing the default '|' placeholder with the
  // separator under test.
  public static String[] getDataForSeparator(char sep, String[] data) {
    return getDataForSeparator('|', sep, data);
  }

  // Returns a copy of {@code data} with every {@code placeholder} character
  // replaced by {@code sep}; the input array is not modified.
  static String[] getDataForSeparator(char placeholder, char sep, String[] data) {
    String[] result = new String[data.length];
    for (int i = 0; i < data.length; i++) {
      result[i] = data[i].replace(placeholder, sep);
    }
    return result;
  }

  // Smoke test: the file must parse without throwing; no content assertions.
  @Test public void testTimeParse() {
    Frame fr = parse_test_file("smalldata/junit/bestbuy_train_10k.csv.gz");
    fr.delete();
  }

  // TODO Update, originally tested enum to string conversion
  // TODO now just tests missing values among strings
  @Test public void testStrings() {
    Frame fr = null;
    try {
      fr = parse_test_file("smalldata/junit/string_test.csv");
      //check dimensions
      int nlines = (int)fr.numRows();
      Assert.assertEquals(65005,nlines);
      Assert.assertEquals(7,fr.numCols());
      //check column types
      Vec[] vecs = fr.vecs();
      Assert.assertTrue(vecs[0].isString());
      Assert.assertTrue(vecs[1].isString());
      Assert.assertTrue(vecs[2].isString());
      Assert.assertTrue(vecs[3].isString());
      Assert.assertTrue(vecs[4].isString());
      Assert.assertTrue(vecs[5].isString());
      Assert.assertTrue(vecs[6].isString());
      //checks column counts - expects MAX_ENUM == 65000
      //Categorical registration is racy so actual enum limit can exceed MAX by a few values
      Assert.assertTrue(65003 <= vecs[0].nzCnt()); //ColV2 A lacks starting values
      Assert.assertTrue(65002 <= vecs[1].nzCnt()); //ColV2 B has random missing values & dble quotes
      Assert.assertTrue(65005 <= vecs[2].nzCnt()); //ColV2 C has all values & single quotes
      Assert.assertTrue(65002 <= vecs[3].nzCnt()); //ColV2 D missing vals just prior to Categorical limit
      Assert.assertTrue(65003 <= vecs[4].nzCnt()); //ColV2 E missing vals just after Categorical limit hit
      //Assert.assertTrue(65000 <= vecs[5].domain().length); //ColV2 F cardinality just at Categorical limit
      Assert.assertTrue(65003 <= vecs[6].nzCnt()); //ColV2 G missing final values
      //spot check value parsing
      ValueString vs = new ValueString();
      Assert.assertEquals("A2", vecs[0].atStr(vs, 2).toString());
      Assert.assertEquals("B7", vecs[1].atStr(vs, 7).toString());
      Assert.assertEquals("'C65001'", vecs[2].atStr(vs, 65001).toString());
      Assert.assertEquals("E65004", vecs[4].atStr(vs, 65004).toString());
      Assert.assertNull(vecs[6].atStr(vs, 65004));
      // NOTE(review): fr.delete() runs both here and in the finally block below,
      // so a successful run deletes the frame twice — presumably delete() is
      // idempotent, but confirm.
      fr.delete();
    } finally {
      if( fr != null ) fr.delete();
    }
  }

  // NA spellings and missing trailing cells across mixed separators.
  @Test public void testMixedSeps() {
    double[][] exp = new double[][] {
      ard(NaN, 1, 1),
      ard(NaN, 2, NaN),
      ard( 3, NaN, 3),
      ard( 4, NaN, NaN),
      ard(NaN, NaN, NaN),
      ard(NaN, NaN, NaN),
      ard(NaN, NaN, 6),
    };
    Frame fr =
parse_test_file("smalldata/junit/is_NA.csv");
    testParsed(fr._key,exp, 25);
  }

  // SVMLight sparse format: "label index:value ...". Unreferenced indices fill
  // with zero; the widest row (index 20) fixes the frame at 21 columns.
  @Test public void testSVMLight() {
    String[] dataset = new String[] {
  //    " 1 2:.2 5:.5 9:.9\n",
  //    "-1 7:.7 8:.8 9:.9\n",
  //    "+1 1:.1 5:.5 6:.6\n"
      "1 2:.2 5:.5 9:.9\n-1 1:.1 4:.4 8:.8\n",
      "1 2:.2 5:.5 9:.9\n1 3:.3 6:.6\n",
      "-1 7:.7 8:.8 9:.9\n1 20:2.\n",
      "+1 1:.1 5:.5 6:.6 10:1\n1 19:1.9\n",
      "1 2:.2 5:.5 9:.9\n-1 1:.1 4:.4 8:.8\n",
      "1 2:.2 5:.5 9:.9\n1 3:.3 6:.6\n",
      "-1 7:.7 8:.8 9:.9\n1 20:2.\n",
      "+1 1:.1 5:.5 6:.6 10:1\n1 19:1.9\n",
      "1 2:.2 5:.5 9:.9\n-1 1:.1 4:.4 8:.8\n",
      "1 2:.2 5:.5 9:.9\n1 3:.3 6:.6\n",
      "-1 7:.7 8:.8 9:.9\n1 20:2.\n",
      "+1 1:.1 5:.5 6:.6 10:1\n1 19:1.9\n"
    };
    // Expected rows: the same 8-row group repeated three times, matching the
    // three repetitions in the dataset above.
    double[][] exp = new double[][] {
  //    ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9),
  //    ard(-1., .0, .0, .0, .0, .0, .0, .7, .8, .9),
  //    ard( 1., .1, .0, .0, .0, .5, .6, .0, .0, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .1, .0, .0, .4, .0, .0, .0, .8, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .3, .0, .0, .6, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .0, .0, .0, .0, .0, .0, .7, .8, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0,2.0),
      ard( 1., .1, .0, .0, .0, .5, .6, .0, .0, .0, 1, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0,1.9, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .1, .0, .0, .4, .0, .0, .0, .8, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .3, .0, .0, .6, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .0, .0, .0, .0, .0, .0, .7, .8, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0,2.0),
      ard( 1., .1, .0, .0, .0, .5, .6, .0, .0, .0, 1, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0,1.9, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .1, .0, .0, .4, .0, .0, .0, .8, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .2, .0, .0, .5, .0, .0, .0, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .3, .0, .0, .6, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( -1., .0, .0, .0, .0, .0, .0, .7, .8, .9, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0, .0,2.0),
      ard( 1., .1, .0, .0, .0, .5, .6, .0, .0, .0, 1, 0, 0, 0, 0, 0, 0, 0, 0, .0, .0),
      ard( 1., .0, .0, .0, .0, .0, .0, .0, .0, .0, 0, 0, 0, 0, 0, 0, 0, 0, 0,1.9, .0),
    };
    StringBuilder sb = new StringBuilder();
    for( String ds : dataset ) sb.append(ds).append("\n");
    Key k = makeByteVec(sb.toString());
    Key r1 = Key.make("r1");
    ParseDataset.parse(r1, k);
    testParsed(r1,exp);
  }

  // Mix of NA's, very large & very small, ^A Hive-style separator, comments, labels
  @Test public void testParseMix() {
    double[][] exp = new double[][] {
      ard( 0 , 0.5 , 1 , 0),
      ard( 3 , NaN , 4 , 1),
      ard( 6 , NaN , 8 , 0),
      ard( 0.6 , 0.7 , 0.8 , 1),
      ard(+0.6 , +0.7 , +0.8 , 0),
      ard(-0.6 , -0.7 , -0.8 , 1),
      ard( .6 , .7 , .8 , 0),
      ard(+ .6 , +.7 , +.8 , 1),
      ard(- .6 , -.7 , -.8 , 0),
      ard(+0.6e0 , +0.7e0 , +0.8e0 , 1),
      ard(-0.6e0 , -0.7e0 , -0.8e0 , 0),
      ard( .6e0 , .7e0 , .8e0 , 1),
      ard(+ .6e0 , +.7e0 , +.8e0 , 0),
      ard( -.6e0 , -.7e0 , -.8e0 , 1),
      ard(+0.6e00 , +0.7e00 , +0.8e00 , 0),
      ard(-0.6e00 , -0.7e00 , -0.8e00 , 1),
      ard( .6e00 , .7e00 , .8e00 , 0),
      ard( +.6e00 , +.7e00 , +.8e00 , 1),
      ard( -.6e00 , -.7e00 , -.8e00 , 0),
      ard(+0.6e-01, +0.7e-01, +0.8e-01, 1),
      ard(-0.6e-01, -0.7e-01, -0.8e-01, 0),
      ard( .6e-01, .7e-01, .8e-01, 1),
      ard( +.6e-01, +.7e-01, +.8e-01, 0),
      ard( -.6e-01, -.7e-01, -.8e-01, 1),
      ard(+0.6e+01, +0.7e+01, +0.8e+01, 0),
      ard(-0.6e+01, -0.7e+01, -0.8e+01, 1),
      ard( .6e+01, .7e+01, .8e+01, 0),
      ard( +.6e+01, +.7e+01, +.8e+01, 1),
      ard( -.6e+01, -.7e+01, -.8e+01, 0),
      ard(+0.6e102, +0.7e102, +0.8e102, 1),
      ard(-0.6e102, -0.7e102, -0.8e102, 0),
      ard( .6e102, .7e102, .8e102, 1),
      ard( +.6e102, +.7e102, +.8e102, 0),
      ard( -.6e102, -.7e102, -.8e102, 1)
    };
    Frame fr = parse_test_file("smalldata/junit/test_parse_mix.csv");
    testParsed(fr._key, exp);
  }

  // Test of parsing numbers with many digits
  @Test public void testParseManyDigits1() {
    String pows10 =
      "1\n"+
      "10\n"+
      "100\n"+
      "1000\n"+
      "10000\n"+
      "100000\n"+
      "1000000\n"+
      "10000000\n"+
      "100000000\n"+
      "1000000000\n"+
      "10000000000\n"+
      "100000000000\n"+
      "1000000000000\n"+
      "10000000000000\n"+
      "100000000000000\n"+
      "1000000000000000\n"+
      "10000000000000000\n"+
      "100000000000000000\n"+
      "1000000000000000000\n"+
      "10000000000000000000\n"+
      "100000000000000000000\n"+
      "1000000000000000000000\n"+
      "10000000000000000000000\n"+
      "100000000000000000000000\n";
    double[][] pows10_exp = new double[][] {
      ard(1e0 ), ard(1e1 ), ard(1e2 ), ard(1e3 ), ard(1e4 ), ard(1e5 ), ard(1e6 ), ard(1e7 ), ard(1e8 ), ard(1e9 ),
      ard(1e10), ard(1e11), ard(1e12), ard(1e13), ard(1e14), ard(1e15), ard(1e16), ard(1e17), ard(1e18), ard(1e19),
      ard(1e20), ard(1e21), ard(1e22), ard(1e23),
    };
    Key k = makeByteVec(pows10);
    Key r1 = Key.make("r1");
    ParseDataset.parse(r1, k);
    testParsed(r1,pows10_exp);
  }

  // Test of parsing numbers with many digits
  @Test public void testParseManyDigits2() {
    String pows10 =
      "9\n"+
      "99\n"+
      "999\n"+
      "9999\n"+
      "99999\n"+
      "999999\n"+
      "9999999\n"+
      "99999999\n"+
      "999999999\n"+
      "9999999999\n"+
      "99999999999\n"+
      "999999999999\n"+
      "9999999999999\n"+
      "99999999999999\n"+
      "999999999999999\n"+
      "9999999999999999\n"+
      "99999999999999999\n"+
      "999999999999999999\n"+
      "9999999999999999999\n"+
      "99999999999999999999\n"+
      "999999999999999999999\n"+
      "9999999999999999999999\n"+
      "99999999999999999999999\n"+
      "999999999999999999999999\n";
    // Values past 18 nines exceed long precision and are expected as doubles.
    double[][] pows10_exp = new double[][] {
      ard(9L),
      ard(99L),
      ard(999L),
      ard(9999L),
      ard(99999L),
      ard(999999L),
      ard(9999999L),
      ard(99999999L),
      ard(999999999L),
      ard(9999999999L),
      ard(99999999999L),
      ard(999999999999L),
      ard(9999999999999L),
      ard(99999999999999L),
      ard(999999999999999L),
      ard(9999999999999999L),
      ard(99999999999999999L),
      ard(999999999999999999L),
      ard(9.99999999999999999e18),
      ard(9.99999999999999999e19),
      ard(9.99999999999999999e20),
      ard(9.99999999999999999e21),
      ard(9.99999999999999999e22),
      ard(9.99999999999999999e23),
    };
    Key k = makeByteVec(pows10);
    Key r1 = Key.make("r1");
    ParseDataset.parse(r1, k);
    testParsed(r1,pows10_exp);
  }

  // Test of parsing numbers with many digits
  @Test public void testParseManyDigits3() {
    String pows10 =
      "0.00000000000001\n"+
      "1000001\n"+
      "2000001\n"+
      "";
    double[][] pows10_exp = new double[][] {
      ard(1e-14),
      ard(1000001L),
      ard(2000001L),
    };
    Key k = makeByteVec(pows10);
    Key r1 = Key.make("r1");
    ParseDataset.parse(r1, k);
    testParsed(r1,pows10_exp);
  }

  // Test of parsing numbers with many digits
  @Test public void testParseManyDigits4() {
    String pows10 =
      "3\n"+
      "1e-18\n"+
      "1e-34\n"+
      "";
    double[][] pows10_exp = new double[][] {
      ard(3),
      ard(1e-18),
      ard(1e-34),
    };
    Key k = makeByteVec(pows10);
    Key r1 = Key.make("r1");
    ParseDataset.parse(r1, k);
    testParsed(r1,pows10_exp);
  }

  // If a column holds only two distinct strings plus one other token kind
  // (here, the number "0"), the column should be declared an enum column.
  @Test @Ignore
  public void testBinaryWithNA() {
    // 75 "0" values followed by "T", "F", "0" (78 rows total).
    String[] data = new String[] {
      "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", // 15
      "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", // 30
      "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", // 45
      "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", // 60
      "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", // 75
      "T", "F", "0",
    };
    // "0" is not in the {F,T} domain, so those rows are expected as NaN.
    double[][] exp = new double[][] {
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 5
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 10
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 15
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 20
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 25
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 30
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 35
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 40
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 45
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 50
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 55
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 60
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 65
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 70
      ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), ard(Double.NaN), // 75
      ard(1),
      ard(0),
      ard(Double.NaN),
    };
    for (char separator : SEPARATORS) {
      String[] dataset = getDataForSeparator(separator, data);
      // Unix line endings.
      StringBuilder sb1 = new StringBuilder();
      for( String ds : dataset ) sb1.append(ds).append("\n");
      Key k1 = makeByteVec(sb1.toString());
      Key r1 = Key.make("r1");
      ParseDataset.parse(r1, k1);
      testParsed(r1,exp);
      // Windows line endings.
      StringBuilder sb2 = new StringBuilder();
      for( String ds : dataset ) sb2.append(ds).append("\r\n");
      Key k2 = makeByteVec(sb2.toString());
      Key r2 = Key.make("r2");
      ParseDataset.parse(r2, k2);
      testParsed(r2,exp);
    }
  }

  // Smoke test over every small fixture: each file must parse (and delete)
  // without throwing; failures are logged via Log.throwErr.
  @Test public void testParseAll() {
    String[] files = new String[]{
      "smalldata/./airlines/allyears2k_headers.zip",
      "smalldata/./covtype/covtype.20k.data",
      "smalldata/./iris/iris.csv",
      "smalldata/./iris/iris_wheader.csv",
      "smalldata/./junit/benign.xls",
      "smalldata/./junit/bestbuy_train_10k.csv.gz",
      "smalldata/./junit/cars.csv",
      "smalldata/./junit/iris.csv",
      "smalldata/./junit/iris.csv.gz",
      "smalldata/./junit/iris.csv.zip",
      "smalldata/./junit/iris.xls",
      "smalldata/./junit/is_NA.csv",
      "smalldata/./junit/one-line-dataset-0.csv",
      "smalldata/./junit/one-line-dataset-1dos.csv",
      "smalldata/./junit/one-line-dataset-1unix.csv",
      "smalldata/./junit/one-line-dataset-2dos.csv",
      "smalldata/./junit/one-line-dataset-2unix.csv",
      "smalldata/./junit/parse_folder/prostate_0.csv",
      "smalldata/./junit/parse_folder/prostate_1.csv",
      "smalldata/./junit/parse_folder/prostate_2.csv",
      "smalldata/./junit/parse_folder/prostate_3.csv",
      "smalldata/./junit/parse_folder/prostate_4.csv",
      "smalldata/./junit/parse_folder/prostate_5.csv",
      "smalldata/./junit/parse_folder/prostate_6.csv",
      "smalldata/./junit/parse_folder/prostate_7.csv",
      "smalldata/./junit/parse_folder/prostate_8.csv",
      "smalldata/./junit/parse_folder/prostate_9.csv",
      "smalldata/./junit/parse_folder_gold.csv",
      "smalldata/./junit/pros.xls",
      "smalldata/./junit/syn_2659x1049.csv.gz",
      "smalldata/./junit/test_parse_mix.csv",
      "smalldata/./junit/test_quote.csv",
      "smalldata/./junit/test_time.csv",
      "smalldata/./junit/test_uuid.csv",
      "smalldata/./junit/time.csv",
      "smalldata/./junit/two-lines-dataset.csv",
      "smalldata/./junit/ven-11.csv",
      "smalldata/./logreg/prostate.csv",
    };
    for (String f : files) {
      for (boolean delete_on_done : new boolean[]{
          true,
          // false
      }) {
        for (int check_header : new int[]{
            ParseSetup.GUESS_HEADER,
            // ParseSetup.HAS_HEADER
        }) {
          try {
            Log.info("Trying to parse " + f);
            NFSFileVec nfs = NFSFileVec.make(find_test_file(f));
            Frame fr = ParseDataset.parse(Key.make(), new Key[]{nfs._key}, delete_on_done, true /*single quote*/, check_header);
            fr.delete();
          } catch (Throwable t) {
            Log.throwErr(t);
          }
        }
      }
    }
  }

  @Ignore //PUBDEV-1384 fails for cloudsize > 1
  @Test public void parseMNIST() {
    File train = find_test_file("bigdata/laptop/mnist/train.csv.gz");
    // The big MNIST fixture is optional; skip silently when it is absent.
    if (train != null) {
      NFSFileVec trainfv = NFSFileVec.make(train);
      Frame frame = ParseDataset.parse(Key.make(), trainfv._key);
      frame.delete();
    }
  }
}
package org.activiti.engine.impl.history;

import java.util.Date;
import java.util.Map;

import org.activiti.engine.impl.db.DbSqlSession;
import org.activiti.engine.impl.interceptor.Session;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.persistence.entity.HistoricActivityInstanceEntity;
import org.activiti.engine.impl.persistence.entity.IdentityLinkEntity;
import org.activiti.engine.impl.persistence.entity.TaskEntity;
import org.activiti.engine.impl.persistence.entity.VariableInstanceEntity;
import org.activiti.engine.impl.pvm.runtime.InterpretableExecution;
import org.flowable.engine.common.impl.history.HistoryLevel;
import org.flowable.identitylink.api.IdentityLink;

/**
 * Records history (audit trail) events for process instances, activities, tasks,
 * variables, identity links and attachments. Implementations are expected to
 * honor the configured {@link HistoryLevel}: each {@code record*} method is a
 * no-op when history is disabled or configured below the required level.
 *
 * NOTE: interface members are implicitly {@code public abstract}; the redundant
 * modifiers were removed (Java Language Specification §9.4).
 */
public interface HistoryManager extends Session {

  /**
   * @return true, if the configured history-level is equal to OR set to a higher value than the given level.
   */
  boolean isHistoryLevelAtLeast(HistoryLevel level);

  /**
   * @return true, if history-level is configured to level other than "none".
   */
  boolean isHistoryEnabled();

  /**
   * Record a process-instance ended. Updates the historic process instance if activity history is enabled.
   */
  void recordProcessInstanceEnd(String processInstanceId, String deleteReason, String activityId);

  /**
   * Record a process-instance started and record start-event if activity history is enabled.
   */
  void recordProcessInstanceStart(ExecutionEntity processInstance);

  /**
   * Record a process-instance name change.
   */
  void recordProcessInstanceNameChange(String processInstanceId, String newName);

  /**
   * Record a sub-process-instance started and alters the calledProcessinstanceId on the current active activity's historic counterpart. Only effective when activity history is enabled.
   */
  void recordSubProcessInstanceStart(ExecutionEntity parentExecution, ExecutionEntity subProcessInstance);

  /**
   * Record the start of an activity, if activity history is enabled.
   */
  void recordActivityStart(ExecutionEntity executionEntity);

  /**
   * Record the end of an activity, if activity history is enabled.
   */
  void recordActivityEnd(ExecutionEntity executionEntity);

  /**
   * Record the end of a start-task, if activity history is enabled.
   */
  void recordStartEventEnded(ExecutionEntity execution, String activityId);

  /**
   * Finds the {@link HistoricActivityInstanceEntity} that is active in the given execution. Uses the {@link DbSqlSession} cache to make sure the right instance is returned, regardless of whether or
   * not entities have already been flushed to DB.
   */
  HistoricActivityInstanceEntity findActivityInstance(ExecutionEntity execution);

  /**
   * Replaces any open historic activityInstances' execution-id's to the id of the replaced execution, if activity history is enabled.
   */
  void recordExecutionReplacedBy(ExecutionEntity execution, InterpretableExecution replacedBy);

  /**
   * Record a change of the process-definition id of a process instance, if activity history is enabled.
   */
  void recordProcessDefinitionChange(String processInstanceId, String processDefinitionId);

  /**
   * Record the creation of a task, if audit history is enabled.
   */
  void recordTaskCreated(TaskEntity task, ExecutionEntity execution);

  /**
   * Record the assignment of task, if activity history is enabled.
   */
  void recordTaskAssignment(TaskEntity task);

  /**
   * Record task instance claim time, if audit history is enabled.
   *
   * @param taskId id of the claimed task
   */
  void recordTaskClaim(String taskId);

  /**
   * Record the id of the task associated with a historic activity, if activity history is enabled.
   */
  void recordTaskId(TaskEntity task);

  /**
   * Record task as ended, if audit history is enabled.
   */
  void recordTaskEnd(String taskId, String deleteReason);

  /**
   * Record task assignee change, if audit history is enabled.
   */
  void recordTaskAssigneeChange(String taskId, String assignee);

  /**
   * Record task owner change, if audit history is enabled.
   */
  void recordTaskOwnerChange(String taskId, String owner);

  /**
   * Record task name change, if audit history is enabled.
   */
  void recordTaskNameChange(String taskId, String taskName);

  /**
   * Record task description change, if audit history is enabled.
   */
  void recordTaskDescriptionChange(String taskId, String description);

  /**
   * Record task due date change, if audit history is enabled.
   */
  void recordTaskDueDateChange(String taskId, Date dueDate);

  /**
   * Record task priority change, if audit history is enabled.
   */
  void recordTaskPriorityChange(String taskId, int priority);

  /**
   * Record task category change, if audit history is enabled.
   */
  void recordTaskCategoryChange(String taskId, String category);

  /**
   * Record task form key change, if audit history is enabled.
   */
  void recordTaskFormKeyChange(String taskId, String formKey);

  /**
   * Record task parent task id change, if audit history is enabled.
   */
  void recordTaskParentTaskIdChange(String taskId, String parentTaskId);

  /**
   * Record task execution id change, if audit history is enabled.
   */
  void recordTaskExecutionIdChange(String taskId, String executionId);

  /**
   * Record task definition key change, if audit history is enabled.
   */
  void recordTaskDefinitionKeyChange(TaskEntity task, String taskDefinitionKey);

  /**
   * Record a change of the process-definition id of a task instance, if activity history is enabled.
   */
  void recordTaskProcessDefinitionChange(String taskId, String processDefinitionId);

  /**
   * Record a variable has been created, if audit history is enabled.
   */
  void recordVariableCreate(VariableInstanceEntity variable);

  /**
   * Record a variable has been created, if audit history is enabled.
   */
  void recordHistoricDetailVariableCreate(VariableInstanceEntity variable, ExecutionEntity sourceActivityExecution, boolean useActivityId);

  /**
   * Record a variable has been updated, if audit history is enabled.
   */
  void recordVariableUpdate(VariableInstanceEntity variable);

  /**
   * Record a variable has been deleted, if audit history is enabled.
   */
  void recordVariableRemoved(VariableInstanceEntity variable);

  /**
   * Creates a new comment to indicate a new {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createIdentityLinkComment(String taskId, String userId, String groupId, String type, boolean create);

  /**
   * Creates a new comment to indicate a new user {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createUserIdentityLinkComment(String taskId, String userId, String type, boolean create);

  /**
   * Creates a new comment to indicate a new group {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createGroupIdentityLinkComment(String taskId, String groupId, String type, boolean create);

  /**
   * Creates a new comment to indicate a new {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createIdentityLinkComment(String taskId, String userId, String groupId, String type, boolean create, boolean forceNullUserId);

  /**
   * Creates a new comment to indicate a new user {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createUserIdentityLinkComment(String taskId, String userId, String type, boolean create, boolean forceNullUserId);

  /**
   * Creates a new comment to indicate a new {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createProcessInstanceIdentityLinkComment(String processInstanceId, String userId, String groupId, String type, boolean create);

  /**
   * Creates a new comment to indicate a new {@link IdentityLink} has been created or deleted, if history is enabled.
   */
  void createProcessInstanceIdentityLinkComment(String processInstanceId, String userId, String groupId, String type, boolean create, boolean forceNullUserId);

  /**
   * Creates a new comment to indicate a new attachment has been created or deleted, if history is enabled.
   */
  void createAttachmentComment(String taskId, String processInstanceId, String attachmentName, boolean create);

  /**
   * Report form properties submitted, if audit history is enabled.
   */
  void reportFormPropertiesSubmitted(ExecutionEntity processInstance, Map<String, String> properties, String taskId);

  // Identity link related history

  /**
   * Record the creation of a new {@link IdentityLink}, if audit history is enabled.
   */
  void recordIdentityLinkCreated(IdentityLinkEntity identityLink);

  void deleteHistoricIdentityLink(String id);

  void updateProcessBusinessKeyInHistory(ExecutionEntity processInstance);
}
package denominator.route53;

import com.squareup.okhttp.mockwebserver.MockResponse;
import org.junit.Rule;
import org.junit.Test;

import java.util.Iterator;

import denominator.ZoneApi;
import denominator.model.Zone;

import static denominator.assertj.ModelAssertions.assertThat;

/**
 * Mock-server tests for the Route53 {@link ZoneApi} implementation.
 *
 * <p>Each test enqueues canned Route53 XML responses on {@link MockRoute53Server}, drives the
 * ZoneApi, and then asserts the exact HTTP requests (method, path, and for writes the XML body)
 * the provider issued. Zone listing performs a follow-up SOA lookup per zone, which is why most
 * list tests enqueue an extra ListResourceRecordSetsResponse per hosted zone.
 */
public class Route53ZoneApiMockTest {

  @Rule
  public MockRoute53Server server = new MockRoute53Server();

  /** Listing zones returns each hosted zone enriched with TTL/email from its SOA record. */
  @Test
  public void iteratorWhenPresent() throws Exception {
    // One hosted zone...
    server.enqueue(new MockResponse().setBody(
        "<ListHostedZonesResponse>\n"
        + " <HostedZones>\n"
        + " <HostedZone>\n"
        + " <Id>/hostedzone/Z1PA6795UKMFR9</Id>\n"
        + " <Name>denominator.io.</Name>\n"
        + " <CallerReference>denomination</CallerReference>\n"
        + " <Config>\n"
        + " <Comment>no comment</Comment>\n"
        + " </Config>\n"
        + " <ResourceRecordSetCount>17</ResourceRecordSetCount>\n"
        + " </HostedZone>\n"
        + " </HostedZones>\n"
        + "</ListHostedZonesResponse>"));
    // ...followed by its SOA record set, queried to fill in the zone's ttl and email.
    server.enqueue(new MockResponse().setBody(
        "<?xml version=\"1.0\"?>\n"
        + "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));

    ZoneApi api = server.connect().api().zones();
    Iterator<Zone> domains = api.iterator();

    // TTL (3601) and email come from the SOA record, not the zone listing.
    assertThat(domains).containsExactly(
        Zone.create("Z1PA6795UKMFR9", "denominator.io.", 3601, "test@denominator.io")
    );

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset?name=denominator.io.&type=SOA");
  }

  /** An empty hosted-zone list yields an empty iterator and no SOA lookups. */
  @Test
  public void iteratorWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<ListHostedZonesResponse><HostedZones /></ListHostedZonesResponse>"));

    ZoneApi api = server.connect().api().zones();
    assertThat(api.iterator()).isEmpty();

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone");
  }

  /**
   * Route53 allows multiple hosted zones with the same name (differing caller references);
   * iterateByName must return all of them, each with its own SOA-derived ttl/email.
   */
  @Test
  public void iterateByNameWhenPresent() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<ListHostedZonesByNameResponse xmlns=\"https://route53.amazonaws.com/doc/2013-04-01/\">\n"
        + " <HostedZones>\n"
        + " <HostedZone>\n"
        + " <Id>/hostedzone/Z2ZEEJCUZCVG56</Id>\n"
        + " <Name>denominator.io.</Name>\n"
        + " <CallerReference>Foo</CallerReference>\n"
        + " <Config>\n"
        + " <PrivateZone>false</PrivateZone>\n"
        + " </Config>\n"
        + " <ResourceRecordSetCount>3</ResourceRecordSetCount>\n"
        + " </HostedZone>\n"
        + " <HostedZone>\n"
        + " <Id>/hostedzone/Z3OQLQGABCU3T</Id>\n"
        + " <Name>denominator.io.</Name>\n"
        + " <CallerReference>Bar</CallerReference>\n"
        + " <Config>\n"
        + " <PrivateZone>false</PrivateZone>\n"
        + " </Config>\n"
        + " <ResourceRecordSetCount>2</ResourceRecordSetCount>\n"
        + " </HostedZone>\n"
        + " </HostedZones>\n"
        + " <DNSName>denominator.io.</DNSName>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListHostedZonesByNameResponse>"));
    // One SOA lookup is made per matching zone.
    server.enqueue(new MockResponse().setBody(
        "<?xml version=\"1.0\"?>\n"
        + "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    server.enqueue(new MockResponse().setBody(
        "<?xml version=\"1.0\"?>\n"
        + "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));

    ZoneApi api = server.connect().api().zones();

    assertThat(api.iterateByName("denominator.io.")).containsExactly(
        Zone.create("Z2ZEEJCUZCVG56", "denominator.io.", 3601, "test@denominator.io"),
        Zone.create("Z3OQLQGABCU3T", "denominator.io.", 3601, "test@denominator.io")
    );

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2013-04-01/hostedzonesbyname?dnsname=denominator.io.");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z2ZEEJCUZCVG56/rrset?name=denominator.io.&type=SOA");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z3OQLQGABCU3T/rrset?name=denominator.io.&type=SOA");
  }

  /**
   * Route53 list by name is only about order. We need to ensure we skip irrelevant zones.
   */
  @Test
  public void iterateByNameWhenIrrelevant() throws Exception {
    // The listing returns a zone whose name does NOT match the query; it must be filtered out.
    server.enqueue(new MockResponse().setBody(
        "<ListHostedZonesByNameResponse xmlns=\"https://route53.amazonaws.com/doc/2013-04-01/\">\n"
        + " <HostedZones>\n"
        + " <HostedZone>\n"
        + " <Id>/hostedzone/Z2ZEEJCUZCVG56</Id>\n"
        + " <Name>denominator.io.</Name>\n"
        + " <CallerReference>Foo</CallerReference>\n"
        + " <Config>\n"
        + " <PrivateZone>false</PrivateZone>\n"
        + " </Config>\n"
        + " <ResourceRecordSetCount>3</ResourceRecordSetCount>\n"
        + " </HostedZone>\n"
        + " </HostedZones>\n"
        + " <DNSName>denominator.io.</DNSName>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListHostedZonesByNameResponse>"));

    ZoneApi api = server.connect().api().zones();

    assertThat(api.iterateByName("denominator.com.")).isEmpty();

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2013-04-01/hostedzonesbyname?dnsname=denominator.com.");
  }

  /** Querying a name with no hosted zones at all yields an empty iterator. */
  @Test
  public void iterateByNameWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<ListHostedZonesByNameResponse><HostedZones /></ListHostedZonesByNameResponse>"));

    ZoneApi api = server.connect().api().zones();

    assertThat(api.iterateByName("denominator.io.")).isEmpty();

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2013-04-01/hostedzonesbyname?dnsname=denominator.io.");
  }

  /**
   * put() on a zone with a null id creates the hosted zone, then rewrites the Amazon-generated
   * SOA record (DELETE old + CREATE new in one change batch) to carry the requested ttl/email.
   */
  @Test
  public void putWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<CreateHostedZoneResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <HostedZone>\n"
        + " <Id>/hostedzone/Z1PA6795UKMFR9</Id>\n"
        + " <Name>denominator.io.</Name>\n"
        + " <CallerReference>a228ebcc-0c93-4627-8fff-1b899f5de2a4</CallerReference>\n"
        + " <Config/>\n"
        + " <ResourceRecordSetCount>2</ResourceRecordSetCount>\n"
        + " </HostedZone>\n"
        + " <ChangeInfo>\n"
        + " <Id>/change/C1DMRYCM7MK76K</Id>\n"
        + " <Status>PENDING</Status>\n"
        + " <SubmittedAt>2015-04-04T02:50:41.602Z</SubmittedAt>\n"
        + " </ChangeInfo>\n"
        + " <DelegationSet>\n"
        + " <NameServers>\n"
        + " <NameServer>ns-534.awsdns-02.net</NameServer>\n"
        + " <NameServer>ns-448.awsdns-56.com</NameServer>\n"
        + " <NameServer>ns-1296.awsdns-34.org</NameServer>\n"
        + " <NameServer>ns-1725.awsdns-23.co.uk</NameServer>\n"
        + " </NameServers>\n"
        + " </DelegationSet>\n"
        + "</CreateHostedZoneResponse>"));
    // The freshly-created zone still has Amazon's default SOA (initialSOA)...
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + initialSOA
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    // ...so a change batch is issued to swap it for the requested one.
    server.enqueue(changingRRSets);

    ZoneApi api = server.connect().api().zones();
    Zone zone = Zone.create(null, "denominator.io.", 3601, "test@denominator.io");
    assertThat(api.put(zone)).isEqualTo("Z1PA6795UKMFR9");

    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset?name=denominator.io.&type=SOA");
    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset")
        .hasXMLBody(
            "<ChangeResourceRecordSetsRequest xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
            + " <ChangeBatch>\n"
            + " <Changes>\n"
            + " <Change>\n"
            + " <Action>DELETE</Action>\n"
            + initialSOA
            + " </Change>\n"
            + " <Change>\n"
            + " <Action>CREATE</Action>\n"
            + soaRRSet
            + " </Change>\n"
            + " </Changes>\n"
            + " </ChangeBatch>\n"
            + "</ChangeResourceRecordSetsRequest>");
  }

  /**
   * put() on an existing zone whose SOA differs from the requested ttl/email issues a
   * DELETE+CREATE change batch for the SOA record (no zone creation).
   */
  @Test
  public void putWhenPresent_changingSOA() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + initialSOA
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    server.enqueue(changingRRSets);

    ZoneApi api = server.connect().api().zones();
    Zone zone = Zone.create("Z1PA6795UKMFR9", "denominator.io.", 3601, "test@denominator.io");
    assertThat(api.put(zone)).isEqualTo(zone.id());

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset?name=denominator.io.&type=SOA");
    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset")
        .hasXMLBody(
            "<ChangeResourceRecordSetsRequest xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
            + " <ChangeBatch>\n"
            + " <Changes>\n"
            + " <Change>\n"
            + " <Action>DELETE</Action>\n"
            + initialSOA
            + " </Change>\n"
            + " <Change>\n"
            + " <Action>CREATE</Action>\n"
            + soaRRSet
            + " </Change>\n"
            + " </Changes>\n"
            + " </ChangeBatch>\n"
            + "</ChangeResourceRecordSetsRequest>");
  }

  /** put() is a no-op (single GET, no change batch) when the existing SOA already matches. */
  @Test
  public void putWhenPresent_noop() throws Exception {
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>100</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));

    ZoneApi api = server.connect().api().zones();
    Zone zone = Zone.create("Z1PA6795UKMFR9", "denominator.io.", 3601, "test@denominator.io");
    assertThat(api.put(zone)).isEqualTo(zone.id());

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset?name=denominator.io.&type=SOA");
  }

  /**
   * delete() lists the zone's record sets first; when only the required NS/SOA remain, the
   * hosted zone is deleted directly without any record-set changes.
   */
  @Test
  public void deleteWhenPresent() throws Exception {
    server.enqueue(oneZone);
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + nsRRSet
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>2</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    server.enqueue(deletingZone);

    ZoneApi api = server.connect().api().zones();
    api.delete("Z1PA6795UKMFR9");

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset");
    server.assertRequest()
        .hasMethod("DELETE")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9");
  }

  /**
   * Route53 refuses to delete a zone with non-NS/SOA record sets, so delete() must first
   * page through the record sets and DELETE any extras before removing the hosted zone.
   */
  @Test
  public void deleteWhenPresent_extraRRSet() throws Exception {
    // Only rrset we expect to delete manually
    String aRecord =
        " <ResourceRecordSet>\n"
        + " <Name>ns-google.denominator.io.</Name>\n"
        + " <Type>A</Type>\n"
        + " <TTL>300</TTL>\n"
        + " <ResourceRecords>\n"
        + " <ResourceRecord>\n"
        + " <Value>8.8.8.8</Value>\n"
        + " </ResourceRecord>\n"
        + " </ResourceRecords>\n"
        + " </ResourceRecordSet>\n";

    server.enqueue(oneZone);
    // First page: truncated after NS + SOA, pointing at the extra A record...
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + nsRRSet
        + soaRRSet
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>true</IsTruncated>\n"
        + " <NextRecordName>ns-google.denominator.io.</NextRecordName>\n"
        + " <NextRecordType>A</NextRecordType>\n"
        + " <MaxItems>2</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    // ...second page: the A record itself.
    server.enqueue(new MockResponse().setBody(
        "<ListResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
        + " <ResourceRecordSets>\n"
        + aRecord
        + " </ResourceRecordSets>\n"
        + " <IsTruncated>false</IsTruncated>\n"
        + " <MaxItems>2</MaxItems>\n"
        + "</ListResourceRecordSetsResponse>"));
    server.enqueue(changingRRSets);
    server.enqueue(deletingZone);

    ZoneApi api = server.connect().api().zones();
    api.delete("Z1PA6795UKMFR9");

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset");
    server.assertRequest()
        .hasMethod("GET")
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset?name=ns-google.denominator.io.&type=A");
    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9/rrset")
        .hasXMLBody(
            "<ChangeResourceRecordSetsRequest xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
            + " <ChangeBatch>\n"
            + " <Changes>\n"
            + " <Change>\n"
            + " <Action>DELETE</Action>\n"
            + aRecord
            + " </Change>\n"
            + " </Changes>\n"
            + " </ChangeBatch>\n"
            + "</ChangeResourceRecordSetsRequest>");
    server.assertRequest()
        .hasMethod("DELETE")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9");
  }

  /** delete() on a missing zone swallows NoSuchHostedZone (404) rather than throwing. */
  @Test
  public void deleteWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setResponseCode(404).setBody(
        "<ErrorResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\"><Error><Type>Sender</Type><Code>NoSuchHostedZone</Code><Message>The specified hosted zone does not exist.</Message></Error><RequestId>d1862286-da13-11e4-a87a-f78bcee90724</RequestId></ErrorResponse>"));

    ZoneApi api = server.connect().api().zones();
    api.delete("Z1PA6795UKMFR9");

    server.assertRequest()
        .hasMethod("GET")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795UKMFR9");
  }

  // The desired SOA: TTL 3601, rname test@denominator.io, serial 2.
  private String soaRRSet =
      " <ResourceRecordSet>\n"
      + " <Name>denominator.io.</Name>\n"
      + " <Type>SOA</Type>\n"
      + " <TTL>3601</TTL>\n"
      + " <ResourceRecords>\n"
      + " <ResourceRecord>\n"
      + " <Value>ns-1612.awsdns-27.net. test@denominator.io 2 7200 3601 1209600 86400</Value>\n"
      + " </ResourceRecord>\n"
      + " </ResourceRecords>\n"
      + " </ResourceRecordSet>\n";

  private String initialSOA =
      // Initially SOA has a TTL of 900, an amazon rname and serial number 1
      soaRRSet.replaceFirst("3601", "900")
          .replace("test@denominator.io 2", "awsdns-hostmaster.amazon.com. 1");

  // The delegation NS record set Amazon creates with every zone (never deleted by delete()).
  private String nsRRSet =
      " <ResourceRecordSet>\n"
      + " <Name>denominator.io.</Name>\n"
      + " <Type>NS</Type>\n"
      + " <TTL>172800</TTL>\n"
      + " <ResourceRecords>\n"
      + " <ResourceRecord>\n"
      + " <Value>ns-1612.awsdns-09.co.uk.</Value>\n"
      + " </ResourceRecord>\n"
      + " <ResourceRecord>\n"
      + " <Value>ns-230.awsdns-28.com.</Value>\n"
      + " </ResourceRecord>\n"
      + " <ResourceRecord>\n"
      + " <Value>ns-993.awsdns-60.net.</Value>\n"
      + " </ResourceRecord>\n"
      + " <ResourceRecord>\n"
      + " <Value>ns-1398.awsdns-46.org.</Value>\n"
      + " </ResourceRecord>\n"
      + " </ResourceRecords>\n"
      + " </ResourceRecordSet>\n";

  // Canned GetHostedZone-style response for zone Z1PA6795UKMFR9.
  private MockResponse oneZone = new MockResponse().setBody(
      "<ListHostedZonesResponse>\n"
      + " <HostedZones>\n"
      + " <HostedZone>\n"
      + " <Id>/hostedzone/Z1PA6795UKMFR9</Id>\n"
      + " <Name>denominator.io.</Name>\n"
      + " <CallerReference>denomination</CallerReference>\n"
      + " <Config>\n"
      + " <Comment>no comment</Comment>\n"
      + " </Config>\n"
      + " <ResourceRecordSetCount>3</ResourceRecordSetCount>\n"
      + " </HostedZone>\n"
      + " </HostedZones>\n"
      + "</ListHostedZonesResponse>");

  // Canned PENDING acknowledgement for a ChangeResourceRecordSets call.
  private MockResponse changingRRSets = new MockResponse().setBody(
      "<ChangeResourceRecordSetsResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\"><ChangeInfo><Id>/change/CWWCRAXTKEB72d</Id><Status>PENDING</Status><SubmittedAt>2015-04-03T14:41:54.371Z</SubmittedAt></ChangeInfo></ChangeResourceRecordSetsResponse>"
  );

  // Canned PENDING acknowledgement for a DeleteHostedZone call.
  private MockResponse deletingZone = new MockResponse().setBody(
      "<DeleteHostedZoneResponse xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\"><ChangeInfo><Id>/change/C1QB5QU6VYXUHE</Id><Status>PENDING</Status><SubmittedAt>2015-04-03T14:41:54.512Z</SubmittedAt></ChangeInfo></DeleteHostedZoneResponse>"
  );
}
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.collect.Lists;
import com.google.javascript.jscomp.MakeDeclaredNamesUnique.InlineRenamer;
import com.google.javascript.rhino.Node;

/**
 * Tests for {@link MakeDeclaredNamesUnique}.
 *
 * <p>The pass is exercised in three modes, selected via instance flags before each test:
 * <ul>
 *   <li>{@code useDefaultRenamer} — the default contextual renamer ({@code name$$N} suffixes);
 *   <li>{@code !useDefaultRenamer} — an {@link InlineRenamer} with the {@code unique_} prefix;
 *   <li>{@code invert} — the inverse pass that strips renaming suffixes back off.
 * </ul>
 *
 * @author johnlenz@google.com (John Lenz)
 */
public class MakeDeclaredNamesUniqueTest extends CompilerTestCase {

  // Selects which renamer getProcessor() builds; tests mutate these before calling test().
  private boolean useDefaultRenamer = false;
  private boolean invert = false;
  private boolean removeConst = false;
  private final String localNamePrefix = "unique_";

  @Override
  public CompilerPass getProcessor(final Compiler compiler) {
    if (!invert) {
      return new CompilerPass() {
        @Override
        public void process(Node externs, Node root) {
          // Reset so generated suffixes are deterministic across test cases.
          compiler.resetUniqueNameId();
          MakeDeclaredNamesUnique renamer = null;
          if (useDefaultRenamer) {
            renamer = new MakeDeclaredNamesUnique();
          } else {
            renamer = new MakeDeclaredNamesUnique(
                new InlineRenamer(
                    compiler.getUniqueNameIdSupplier(),
                    localNamePrefix,
                    removeConst));
          }
          // Traverse externs and source together so extern names are respected.
          NodeTraversal.traverseRoots(
              compiler, Lists.newArrayList(externs, root), renamer);
        }
      };
    } else {
      return MakeDeclaredNamesUnique.getContextualRenameInverter(compiler);
    }
  }

  @Override
  protected int getNumRepetitions() {
    // The normalize pass is only run once.
    return 1;
  }

  @Override
  public void setUp() {
    // Reset mode flags so each test starts from the non-inverting, inline-renamer default.
    removeConst = false;
    invert = false;
    useDefaultRenamer = false;
  }

  /** Asserts the rename is a round-trip: forward produces {@code expected}, inverse restores {@code original}. */
  public void testWithInversion(String original, String expected) {
    invert = false;
    test(original, expected);
    invert = true;
    test(expected, original);
    invert = false;
  }

  /** Asserts the code is unchanged by both the forward and the inverse pass. */
  public void testSameWithInversion(String externs, String original) {
    invert = false;
    testSame(externs, original, null);
    invert = true;
    testSame(externs, original, null);
    invert = false;
  }

  /** Convenience overload with no externs. */
  public void testSameWithInversion(String original) {
    testSameWithInversion("", original);
  }

  // Wraps a snippet in a function so its declarations are local, not global.
  private String wrapInFunction(String s) {
    return "function f(){" + s + "}";
  }

  /** Runs test() with both sides wrapped in a function scope. */
  public void testInFunction(String original, String expected) {
    test(wrapInFunction(original), wrapInFunction(expected));
  }

  /** Runs testSame() with the snippet wrapped in a function scope. */
  public void testSameInFunction(String original) {
    testSame(wrapInFunction(original));
  }

  public void testMakeLocalNamesUniqueWithContext1() {
    // Set the test type
    this.useDefaultRenamer = true;

    invert = true;
    // Inliner suffixes ($$inline_N) are rewritten rather than simply stripped.
    test(
        "var a;function foo(){var a$$inline_1; a = 1}",
        "var a;function foo(){var a$$0; a = 1}");
    test(
        "var a;function foo(){var a$$inline_1;}",
        "var a;function foo(){var a;}");
  }

  public void testMakeLocalNamesUniqueWithContext2() {
    // Set the test type
    this.useDefaultRenamer = true;

    // Verify global names are untouched.
    testSameWithInversion("var a;");

    // Verify global names are untouched.
    testSameWithInversion("a;");

    // Local names are made unique.
    testWithInversion(
        "var a;function foo(a){var b;a}",
        "var a;function foo(a$$1){var b;a$$1}");
    testWithInversion(
        "var a;function foo(){var b;a}function boo(){var b;a}",
        "var a;function foo(){var b;a}function boo(){var b$$1;a}");
    testWithInversion(
        "function foo(a){var b}"
        + "function boo(a){var b}",
        "function foo(a){var b}"
        + "function boo(a$$1){var b$$1}");

    // Verify functions expressions are renamed.
    testWithInversion(
        "var a = function foo(){foo()};var b = function foo(){foo()};",
        "var a = function foo(){foo()};var b = function foo$$1(){foo$$1()};");

    // Verify catch exceptions names are made unique
    testWithInversion(
        "try { } catch(e) {e;}",
        "try { } catch(e) {e;}");

    // Inversion does not handle exceptions correctly.
    test(
        "try { } catch(e) {e;}; try { } catch(e) {e;}",
        "try { } catch(e) {e;}; try { } catch(e$$1) {e$$1;}");
    test(
        "try { } catch(e) {e; try { } catch(e) {e;}};",
        "try { } catch(e) {e; try { } catch(e$$1) {e$$1;} }; ");
  }

  public void testMakeLocalNamesUniqueWithContext3() {
    // Set the test type
    this.useDefaultRenamer = true;

    String externs = "var extern1 = {};";

    // Verify global names are untouched.
    testSameWithInversion(externs, "var extern1 = extern1 || {};");

    // Verify global names are untouched.
    testSame(externs, "var extern1 = extern1 || {};", null);
  }

  public void testMakeLocalNamesUniqueWithContext4() {
    // Set the test type
    this.useDefaultRenamer = true;

    // Inversion does not handle exceptions correctly.
    testInFunction(
        "var e; try { } catch(e) {e;}; try { } catch(e) {e;}",
        "var e; try { } catch(e$$1) {e$$1;}; try { } catch(e$$2) {e$$2;}");
    testInFunction(
        "var e; try { } catch(e) {e; try { } catch(e) {e;}}",
        "var e; try { } catch(e$$1) {e$$1; try { } catch(e$$2) {e$$2;} }");
    testInFunction(
        "try { } catch(e) {e;}; try { } catch(e) {e;} var e;",
        "try { } catch(e$$1) {e$$1;}; try { } catch(e$$2) {e$$2;} var e;");
    testInFunction(
        "try { } catch(e) {e; try { } catch(e) {e;}} var e;",
        "try { } catch(e$$1) {e$$1; try { } catch(e$$2) {e$$2;} } var e;");

    invert = true;

    testInFunction(
        "var e; try { } catch(e$$0) {e$$0;}; try { } catch(e$$1) {e$$1;}",
        "var e; try { } catch(e$$2) {e$$2;}; try { } catch(e$$0) {e$$0;}");
    testInFunction(
        "var e; try { } catch(e$$1) {e$$1; try { } catch(e$$2) {e$$2;} };",
        "var e; try { } catch(e$$0) {e$$0; try { } catch(e$$1) {e$$1;} };");
    testInFunction(
        "try { } catch(e) {e;}; try { } catch(e$$1) {e$$1;};var e$$2;",
        "try { } catch(e) {e;}; try { } catch(e$$0) {e$$0;};var e$$1;");
    testInFunction(
        "try { } catch(e) {e; try { } catch(e$$1) {e$$1;} };var e$$2",
        "try { } catch(e) {e; try { } catch(e$$0) {e$$0;} };var e$$1");
  }

  public void testArguments() {
    // Set the test type
    this.useDefaultRenamer = true;

    // Don't distinguish between "arguments", it can't be made unique.
    testSameWithInversion(
        "function foo(){var arguments;function bar(){var arguments;}}");

    invert = true;

    // Don't introduce new references to arguments, it is special.
    test(
        "function foo(){var arguments$$1;}",
        "function foo(){var arguments$$0;}");
  }

  public void testMakeLocalNamesUniqueWithoutContext() {
    // Set the test type
    this.useDefaultRenamer = false;

    test("var a;", "var a$$unique_0");

    // Verify undeclared names are untouched.
    testSame("a;");

    // Local names are made unique.
    test("var a;"
         + "function foo(a){var b;a}",
         "var a$$unique_0;"
         + "function foo$$unique_1(a$$unique_2){var b$$unique_3;a$$unique_2}");
    test("var a;"
         + "function foo(){var b;a}"
         + "function boo(){var b;a}",
         "var a$$unique_0;"
         + "function foo$$unique_1(){var b$$unique_3;a$$unique_0}"
         + "function boo$$unique_2(){var b$$unique_4;a$$unique_0}");

    // Verify function expressions are renamed.
    test("var a = function foo(){foo()};",
         "var a$$unique_0 = function foo$$unique_1(){foo$$unique_1()};");

    // Verify catch exceptions names are made unique
    test("try { } catch(e) {e;}",
         "try { } catch(e$$unique_0) {e$$unique_0;}");
    test("try { } catch(e) {e;};"
         + "try { } catch(e) {e;}",
         "try { } catch(e$$unique_0) {e$$unique_0;};"
         + "try { } catch(e$$unique_1) {e$$unique_1;}");
    test("try { } catch(e) {e; "
         + "try { } catch(e) {e;}};",
         "try { } catch(e$$unique_0) {e$$unique_0; "
         + "try { } catch(e$$unique_1) {e$$unique_1;} }; ");
  }

  public void testOnlyInversion() {
    invert = true;
    // Conflicting short names are re-suffixed starting from $$0.
    test("function f(a, a$$1) {}",
         "function f(a, a$$0) {}");
    test("function f(a$$1, b$$2) {}",
         "function f(a, b) {}");
    test("function f(a$$1, a$$2) {}",
         "function f(a, a$$0) {}");
    // Catch scopes and globals are left alone by the inverter.
    testSame("try { } catch(e) {e;}; try { } catch(e$$1) {e$$1;}");
    testSame("try { } catch(e) {e; try { } catch(e$$1) {e$$1;} }; ");
    testSame("var a$$1;");
    testSame("function f() { var $$; }");
    test("var CONST = 3; var b = CONST;",
         "var CONST = 3; var b = CONST;");
    test("function() {var CONST = 3; var ACONST$$1 = 2;}",
         "function() {var CONST = 3; var ACONST = 2;}");
  }

  public void testOnlyInversion2() {
    invert = true;
    test("function () {try { } catch(e) {e;}; try { } catch(e$$0) {e$$0;}}",
         "function () {try { } catch(e) {e;}; try { } catch(e$$1) {e$$1;}}");
  }

  public void testOnlyInversion3() {
    invert = true;
    // Inner-function locals that no longer conflict drop their suffix entirely.
    test(
        "function x1() {"
        + "  var a$$1;"
        + "  function x2() {"
        + "    var a$$2;"
        + "  }"
        + "  function x3() {"
        + "    var a$$3;"
        + "  }"
        + "}",
        "function x1() {"
        + "  var a$$0;"
        + "  function x2() {"
        + "    var a;"
        + "  }"
        + "  function x3() {"
        + "    var a;"
        + "  }"
        + "}");
  }

  public void testOnlyInversion4() {
    invert = true;
    // An outer name referenced from an inner scope keeps a suffix to avoid capture.
    test(
        "function x1() {"
        + "  var a$$0;"
        + "  function x2() {"
        + "    var a;a$$0++"
        + "  }"
        + "}",
        "function x1() {"
        + "  var a$$1;"
        + "  function x2() {"
        + "    var a;a$$1++"
        + "  }"
        + "}");
  }

  public void testConstRemovingRename1() {
    removeConst = true;
    test("function() {var CONST = 3; var ACONST$$1 = 2;}",
         "function() {var CONST$$unique_0 = 3; var ACONST$$unique_1 = 2;}");
  }

  public void testConstRemovingRename2() {
    removeConst = true;
    test("var CONST = 3; var b = CONST;",
         "var CONST$$unique_0 = 3; var b$$unique_1 = CONST$$unique_0;");
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.spi.block;

import org.openjdk.jol.info.ClassLayout;

import javax.annotation.Nullable;

import java.util.Arrays;
import java.util.function.BiConsumer;

import static com.facebook.presto.spi.block.BlockUtil.calculateBlockResetSize;
import static com.facebook.presto.spi.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.spi.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.spi.block.BlockUtil.countUsedPositions;
import static com.facebook.presto.spi.block.BlockUtil.internalPositionInRange;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.max;

/**
 * A {@link BlockBuilder} that accumulates {@code short} values into a flat array plus a
 * parallel null mask, then produces a {@link ShortArrayBlock}.
 *
 * <p>Not thread safe. Array growth is lazy: the arrays start empty and are first sized to
 * {@code initialEntryCount} on the first write, so an unused builder retains almost no memory.
 * The {@code hasNullValue}/{@code hasNonNullValue} flags let {@link #build()} and the
 * copy/region methods produce an RLE null block (all-null case) or drop the null mask
 * (no-null case).
 */
public class ShortArrayBlockBuilder
        implements BlockBuilder
{
    // Shallow size of this object, used in retained-size accounting.
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ShortArrayBlockBuilder.class).instanceSize();
    // Single-position null block used as the repeated value when every position is null.
    private static final Block NULL_VALUE_BLOCK = new ShortArrayBlock(0, 1, new boolean[] {true}, new short[1]);

    // Optional sink for per-entry byte accounting; may be null.
    @Nullable
    private BlockBuilderStatus blockBuilderStatus;
    // False until the arrays have been grown to initialEntryCount for the first time.
    private boolean initialized;
    private int initialEntryCount;

    private int positionCount;
    // Track whether any null / any non-null value was appended (drives build() shortcuts).
    private boolean hasNullValue;
    private boolean hasNonNullValue;

    // it is assumed that these arrays are the same length
    private boolean[] valueIsNull = new boolean[0];
    private short[] values = new short[0];

    private long retainedSizeInBytes;

    /**
     * @param blockBuilderStatus optional accounting callback, or null
     * @param expectedEntries initial capacity hint; clamped to at least 1
     */
    public ShortArrayBlockBuilder(@Nullable BlockBuilderStatus blockBuilderStatus, int expectedEntries)
    {
        this.blockBuilderStatus = blockBuilderStatus;
        this.initialEntryCount = max(expectedEntries, 1);

        updateDataSize();
    }

    /**
     * Appends one value (narrowed to {@code short}) at the current position.
     */
    @Override
    public BlockBuilder writeShort(int value)
    {
        if (values.length <= positionCount) {
            growCapacity();
        }

        values[positionCount] = (short) value;

        hasNonNullValue = true;
        positionCount++;
        if (blockBuilderStatus != null) {
            // One short plus one byte of null mask per entry.
            blockBuilderStatus.addBytes(Byte.BYTES + Short.BYTES);
        }
        return this;
    }

    /** Entries are single-valued, so closing is a no-op. */
    @Override
    public BlockBuilder closeEntry()
    {
        return this;
    }

    /** Appends a null at the current position. */
    @Override
    public BlockBuilder appendNull()
    {
        if (values.length <= positionCount) {
            growCapacity();
        }

        valueIsNull[positionCount] = true;

        hasNullValue = true;
        positionCount++;
        if (blockBuilderStatus != null) {
            blockBuilderStatus.addBytes(Byte.BYTES + Short.BYTES);
        }
        return this;
    }

    /**
     * Produces the finished block. If every position is null, returns an RLE block over a
     * shared single-null value block instead of materializing the arrays.
     */
    @Override
    public Block build()
    {
        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount);
        }
        return new ShortArrayBlock(0, positionCount, valueIsNull, values);
    }

    /** New builder of the same type, sized from this builder's current position count. */
    @Override
    public BlockBuilder newBlockBuilderLike(BlockBuilderStatus blockBuilderStatus)
    {
        return new ShortArrayBlockBuilder(blockBuilderStatus, calculateBlockResetSize(positionCount));
    }

    // Grows both parallel arrays: first growth jumps to initialEntryCount, later growths
    // follow BlockUtil's growth policy.
    private void growCapacity()
    {
        int newSize;
        if (initialized) {
            newSize = BlockUtil.calculateNewArraySize(values.length);
        }
        else {
            newSize = initialEntryCount;
            initialized = true;
        }

        valueIsNull = Arrays.copyOf(valueIsNull, newSize);
        values = Arrays.copyOf(values, newSize);
        updateDataSize();
    }

    // Recomputes retainedSizeInBytes after any array (re)allocation.
    private void updateDataSize()
    {
        retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
        if (blockBuilderStatus != null) {
            retainedSizeInBytes += BlockBuilderStatus.INSTANCE_SIZE;
        }
    }

    @Override
    public long getSizeInBytes()
    {
        // Fixed width: value bytes plus one null-mask byte per position.
        return (Short.BYTES + Byte.BYTES) * (long) positionCount;
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        return (Short.BYTES + Byte.BYTES) * (long) length;
    }

    @Override
    public long getPositionsSizeInBytes(boolean[] positions)
    {
        return (Short.BYTES + Byte.BYTES) * (long) countUsedPositions(positions);
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public long getEstimatedDataSizeForStats(int position)
    {
        return isNull(position) ? 0 : Short.BYTES;
    }

    /** Reports each retained array (and this instance) to the consumer for memory accounting. */
    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        consumer.accept(values, sizeOf(values));
        consumer.accept(valueIsNull, sizeOf(valueIsNull));
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public short getShort(int position)
    {
        checkReadablePosition(position);
        return values[position];
    }

    @Override
    public boolean mayHaveNull()
    {
        return hasNullValue;
    }

    @Override
    public boolean isNull(int position)
    {
        checkReadablePosition(position);
        return valueIsNull[position];
    }

    /** Copies the value (not the null flag) at {@code position} into another builder. */
    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.writeShort(values[position]);
        blockBuilder.closeEntry();
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        checkReadablePosition(position);
        return new ShortArrayBlock(
                0,
                1,
                valueIsNull[position] ? new boolean[] {true} : null,
                new short[] {values[position]});
    }

    /**
     * Copies the given positions into a new block. The null mask is only materialized when
     * some null has been appended; an all-null builder short-circuits to an RLE block.
     */
    @Override
    public Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);

        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        boolean[] newValueIsNull = null;
        if (hasNullValue) {
            newValueIsNull = new boolean[length];
        }
        short[] newValues = new short[length];
        for (int i = 0; i < length; i++) {
            int position = positions[offset + i];
            checkReadablePosition(position);
            if (hasNullValue) {
                newValueIsNull[i] = valueIsNull[position];
            }
            newValues[i] = values[position];
        }
        return new ShortArrayBlock(0, length, newValueIsNull, newValues);
    }

    /** Zero-copy view over a region of this builder's arrays. */
    @Override
    public Block getRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        return new ShortArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values);
    }

    /** Deep copy of a region of this builder's arrays. */
    @Override
    public Block copyRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        boolean[] newValueIsNull = null;
        if (hasNullValue) {
            newValueIsNull = Arrays.copyOfRange(valueIsNull, positionOffset, positionOffset + length);
        }
        short[] newValues = Arrays.copyOfRange(values, positionOffset, positionOffset + length);
        return new ShortArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public String getEncodingName()
    {
        return ShortArrayBlockEncoding.NAME;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder("ShortArrayBlockBuilder{");
        sb.append("positionCount=").append(getPositionCount());
        sb.append('}');
        return sb.toString();
    }

    private void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
    }

    /** Unchecked variant of {@link #getShort(int)}; bounds verified only under assertions. */
    @Override
    public short getShortUnchecked(int internalPosition)
    {
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return values[internalPosition];
    }

    /** Unchecked variant of {@link #isNull(int)}; callers must first consult {@link #mayHaveNull()}. */
    @Override
    public boolean isNullUnchecked(int internalPosition)
    {
        assert mayHaveNull() : "no nulls present";
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return valueIsNull[internalPosition];
    }

    /** A builder always starts at array offset zero. */
    @Override
    public int getOffsetBase()
    {
        return 0;
    }
}
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.exceptions;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.junit.Test;

import rx.exceptions.CompositeException.CompositeExceptionCausalChain;

/**
 * Tests for {@link CompositeException}, focusing on how it attaches a synthetic
 * cause chain to an arbitrary set of throwables without ever introducing a
 * circular cause reference (which {@code Throwable.printStackTrace} would
 * report as "CIRCULAR REFERENCE").
 */
public class CompositeExceptionTest {

    // A three-level cause chain: ex3 -> ex2 -> ex1. Several tests feed
    // overlapping subsets of this chain into CompositeException to verify
    // that duplicates are collapsed and no cycles are created.
    private final Throwable ex1 = new Throwable("Ex1");
    private final Throwable ex2 = new Throwable("Ex2", ex1);
    private final Throwable ex3 = new Throwable("Ex3", ex2);

    public CompositeExceptionTest() {
    }

    // Builds a fresh CompositeException wrapping ex1, ex2 and ex3 (in that order).
    private CompositeException getNewCompositeExceptionWithEx123() {
        List<Throwable> throwables = new ArrayList<Throwable>();
        throwables.add(ex1);
        throwables.add(ex2);
        throwables.add(ex3);
        return new CompositeException(throwables);
    }

    // Three distinct exceptions sharing one root cause: the shared cause must
    // not produce a cycle, and all three must be reported.
    @Test(timeout = 1000)
    public void testMultipleWithSameCause() {
        Throwable rootCause = new Throwable("RootCause");
        Throwable e1 = new Throwable("1", rootCause);
        Throwable e2 = new Throwable("2", rootCause);
        Throwable e3 = new Throwable("3", rootCause);
        CompositeException ce = new CompositeException(Arrays.asList(e1, e2, e3));

        System.err.println("----------------------------- print composite stacktrace");
        ce.printStackTrace();
        assertEquals(3, ce.getExceptions().size());

        assertNoCircularReferences(ce);
        assertNotNull(getRootCause(ce));
        System.err.println("----------------------------- print cause stacktrace");
        ce.getCause().printStackTrace();
    }

    // ex1 is already the (transitive) cause of ex2; listing parent before
    // child must still yield exactly two reported exceptions and no cycle.
    @Test(timeout = 1000)
    public void testCompositeExceptionFromParentThenChild() {
        CompositeException cex = new CompositeException(Arrays.asList(ex1, ex2));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(2, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // Same pair as above but listed child-first: order must not matter.
    @Test(timeout = 1000)
    public void testCompositeExceptionFromChildThenParent() {
        CompositeException cex = new CompositeException(Arrays.asList(ex2, ex1));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(2, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // A nested CompositeException is flattened: ex1 plus a composite of
    // {ex1, ex2, ex3} still yields only the three distinct exceptions.
    @Test(timeout = 1000)
    public void testCompositeExceptionFromChildAndComposite() {
        CompositeException cex = new CompositeException(Arrays.asList(ex1, getNewCompositeExceptionWithEx123()));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(3, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // Same as above with the composite listed first: flattening is order-independent.
    @Test(timeout = 1000)
    public void testCompositeExceptionFromCompositeAndChild() {
        CompositeException cex = new CompositeException(Arrays.asList(getNewCompositeExceptionWithEx123(), ex1));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(3, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // Two composites wrapping the same three throwables collapse to three
    // distinct exceptions, not six.
    @Test(timeout = 1000)
    public void testCompositeExceptionFromTwoDuplicateComposites() {
        List<Throwable> exs = new ArrayList<Throwable>();
        exs.add(getNewCompositeExceptionWithEx123());
        exs.add(getNewCompositeExceptionWithEx123());
        CompositeException cex = new CompositeException(exs);

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(3, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    /**
     * This hijacks the Throwable.printStackTrace() output and puts it in a string, where we can look for
     * "CIRCULAR REFERENCE" (a String added by Throwable.printEnclosedStackTrace)
     */
    private static void assertNoCircularReferences(Throwable ex) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream printStream = new PrintStream(baos);
        ex.printStackTrace(printStream);
        assertFalse(baos.toString().contains("CIRCULAR REFERENCE"));
    }

    // Walks the cause chain to its end; returns null when ex has no cause at all.
    private static Throwable getRootCause(Throwable ex) {
        Throwable root = ex.getCause();
        if (root == null) {
            return null;
        } else {
            while(true) {
                if (root.getCause() == null) {
                    return root;
                } else {
                    root = root.getCause();
                }
            }
        }
    }

    // A null collection must not blow up getCause()/printStackTrace().
    @Test
    public void testNullCollection() {
        CompositeException composite = new CompositeException((List<Throwable>)null);
        composite.getCause();
        composite.printStackTrace();
    }

    // A collection containing a null element must likewise be tolerated.
    @Test
    public void testNullElement() {
        CompositeException composite = new CompositeException(Collections.singletonList((Throwable) null));
        composite.getCause();
        composite.printStackTrace();
    }

    // A throwable whose initCause() throws must not break cause-chain assembly.
    @Test(timeout = 1000)
    public void testCompositeExceptionWithUnsupportedInitCause() {
        Throwable t = new Throwable() {
            /** Serial id for the anonymous throwable subclass. */
            private static final long serialVersionUID = -3282577447436848385L;

            @Override
            public synchronized Throwable initCause(Throwable cause) {
                throw new UnsupportedOperationException();
            }
        };
        CompositeException cex = new CompositeException(Arrays.asList(t, ex1));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(2, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // A throwable whose initCause() silently returns null must also be tolerated.
    @Test(timeout = 1000)
    public void testCompositeExceptionWithNullInitCause() {
        Throwable t = new Throwable("ThrowableWithNullInitCause") {
            /** Serial id for the anonymous throwable subclass. */
            private static final long serialVersionUID = -7984762607894527888L;

            @Override
            public synchronized Throwable initCause(Throwable cause) {
                return null;
            }
        };
        CompositeException cex = new CompositeException(Arrays.asList(t, ex1));

        System.err.println("----------------------------- print composite stacktrace");
        cex.printStackTrace();
        assertEquals(2, cex.getExceptions().size());

        assertNoCircularReferences(cex);
        assertNotNull(getRootCause(cex));

        System.err.println("----------------------------- print cause stacktrace");
        cex.getCause().printStackTrace();
    }

    // getMessage() reports the count of wrapped exceptions (collection ctor).
    @Test
    public void messageCollection() {
        CompositeException compositeException = new CompositeException(Arrays.asList(ex1, ex3));
        assertEquals("2 exceptions occurred. ", compositeException.getMessage());
    }

    // getMessage() reports the count of wrapped exceptions (varargs ctor).
    @Test
    public void messageVarargs() {
        CompositeException compositeException = new CompositeException(ex1, ex2, ex3);
        assertEquals("3 exceptions occurred. ", compositeException.getMessage());
    }

    // Verifies that pre-existing cause chains are stitched into one linear chain.
    @Test
    public void complexCauses() {
        Throwable e1 = new Throwable("1");
        Throwable e2 = new Throwable("2");
        e1.initCause(e2);

        Throwable e3 = new Throwable("3");
        Throwable e4 = new Throwable("4");
        e3.initCause(e4);

        Throwable e5 = new Throwable("5");
        Throwable e6 = new Throwable("6");
        e5.initCause(e6);

        CompositeException compositeException = new CompositeException(e1, e3, e5);
        assert(compositeException.getCause() instanceof CompositeExceptionCausalChain);

        List<Throwable> causeChain = new ArrayList<Throwable>();
        Throwable cause = compositeException.getCause().getCause();
        while (cause != null) {
            causeChain.add(cause);
            cause = cause.getCause();
        }
        // The original relations
        //
        // e1 -> e2
        // e3 -> e4
        // e5 -> e6
        //
        // will be set to
        //
        // e1 -> e2 -> e3 -> e4 -> e5 -> e6
        assertEquals(Arrays.asList(e1, e2, e3, e4, e5, e6), causeChain);
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.workspaces.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workspaces-2015-04-08/CreateUpdatedWorkspaceImage" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateUpdatedWorkspaceImageRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of the new updated WorkSpace image. * </p> */ private String name; /** * <p> * A description of whether updates for the WorkSpace image are available. * </p> */ private String description; /** * <p> * The identifier of the source WorkSpace image. * </p> */ private String sourceImageId; /** * <p> * The tags that you want to add to the new updated WorkSpace image. * </p> * <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that grants * your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * </note> */ private com.amazonaws.internal.SdkInternalList<Tag> tags; /** * <p> * The name of the new updated WorkSpace image. * </p> * * @param name * The name of the new updated WorkSpace image. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the new updated WorkSpace image. 
* </p> * * @return The name of the new updated WorkSpace image. */ public String getName() { return this.name; } /** * <p> * The name of the new updated WorkSpace image. * </p> * * @param name * The name of the new updated WorkSpace image. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateUpdatedWorkspaceImageRequest withName(String name) { setName(name); return this; } /** * <p> * A description of whether updates for the WorkSpace image are available. * </p> * * @param description * A description of whether updates for the WorkSpace image are available. */ public void setDescription(String description) { this.description = description; } /** * <p> * A description of whether updates for the WorkSpace image are available. * </p> * * @return A description of whether updates for the WorkSpace image are available. */ public String getDescription() { return this.description; } /** * <p> * A description of whether updates for the WorkSpace image are available. * </p> * * @param description * A description of whether updates for the WorkSpace image are available. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateUpdatedWorkspaceImageRequest withDescription(String description) { setDescription(description); return this; } /** * <p> * The identifier of the source WorkSpace image. * </p> * * @param sourceImageId * The identifier of the source WorkSpace image. */ public void setSourceImageId(String sourceImageId) { this.sourceImageId = sourceImageId; } /** * <p> * The identifier of the source WorkSpace image. * </p> * * @return The identifier of the source WorkSpace image. */ public String getSourceImageId() { return this.sourceImageId; } /** * <p> * The identifier of the source WorkSpace image. * </p> * * @param sourceImageId * The identifier of the source WorkSpace image. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CreateUpdatedWorkspaceImageRequest withSourceImageId(String sourceImageId) { setSourceImageId(sourceImageId); return this; } /** * <p> * The tags that you want to add to the new updated WorkSpace image. * </p> * <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that grants * your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * </note> * * @return The tags that you want to add to the new updated WorkSpace image.</p> <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that * grants your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> */ public java.util.List<Tag> getTags() { if (tags == null) { tags = new com.amazonaws.internal.SdkInternalList<Tag>(); } return tags; } /** * <p> * The tags that you want to add to the new updated WorkSpace image. * </p> * <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that grants * your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * </note> * * @param tags * The tags that you want to add to the new updated WorkSpace image.</p> <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that * grants your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> */ public void setTags(java.util.Collection<Tag> tags) { if (tags == null) { this.tags = null; return; } this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags); } /** * <p> * The tags that you want to add to the new updated WorkSpace image. * </p> * <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that grants * your IAM user permissions to use <code>workspaces:CreateTags</code>. 
* </p> * </note> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the * existing values. * </p> * * @param tags * The tags that you want to add to the new updated WorkSpace image.</p> <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that * grants your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public CreateUpdatedWorkspaceImageRequest withTags(Tag... tags) { if (this.tags == null) { setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length)); } for (Tag ele : tags) { this.tags.add(ele); } return this; } /** * <p> * The tags that you want to add to the new updated WorkSpace image. * </p> * <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that grants * your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * </note> * * @param tags * The tags that you want to add to the new updated WorkSpace image.</p> <note> * <p> * To add tags at the same time when you're creating the updated image, you must create an IAM policy that * grants your IAM user permissions to use <code>workspaces:CreateTags</code>. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public CreateUpdatedWorkspaceImageRequest withTags(java.util.Collection<Tag> tags) { setTags(tags); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getSourceImageId() != null) sb.append("SourceImageId: ").append(getSourceImageId()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateUpdatedWorkspaceImageRequest == false) return false; CreateUpdatedWorkspaceImageRequest other = (CreateUpdatedWorkspaceImageRequest) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getSourceImageId() == null ^ this.getSourceImageId() == null) return false; if (other.getSourceImageId() != null && other.getSourceImageId().equals(this.getSourceImageId()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getSourceImageId() == null) ? 0 : getSourceImageId().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 
0 : getTags().hashCode()); return hashCode; } @Override public CreateUpdatedWorkspaceImageRequest clone() { return (CreateUpdatedWorkspaceImageRequest) super.clone(); } }
/*============================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ package gr.forth.ics.isl.x3ml.engine; import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.StmtIterator; import com.hp.hpl.jena.rdf.model.impl.ResourceImpl; import com.hp.hpl.jena.sparql.core.DatasetGraph; import com.hp.hpl.jena.sparql.core.DatasetGraphSimpleMem; import com.hp.hpl.jena.sparql.core.Quad; import javax.xml.namespace.NamespaceContext; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import static gr.forth.ics.isl.x3ml.X3MLEngine.Output; import static gr.forth.ics.isl.x3ml.X3MLEngine.exception; import static gr.forth.ics.isl.x3ml.engine.X3ML.TypeElement; import org.apache.jena.riot.Lang; import org.apache.jena.riot.RDFDataMgr; import java.util.Iterator; import gr.forth.Labels; import java.io.File; import java.io.FileNotFoundException; import java.io.OutputStream; import lombok.extern.log4j.Log4j2; /** * The class is 
responsible for exporting the transformation results. More specifically * it exports the contents of the Jena graph model. * It supports exporting triples and quads. * * @author Gerald de Jong &lt;gerald@delving.eu&gt; * @author Nikos Minadakis &lt;minadakn@ics.forth.gr&gt; * @author Yannis Marketakis &lt;marketak@ics.forth.gr&gt; */ @Log4j2 public class ModelOutput implements Output { public static DatasetGraph quadGraph=new DatasetGraphSimpleMem(); private final Model model; private final NamespaceContext namespaceContext; public ModelOutput(Model model, NamespaceContext namespaceContext) { this.model = model; this.namespaceContext = namespaceContext; } @Override public Model getModel() { return model; } public String getNamespace(TypeElement typeElement){ if (typeElement == null) { throw exception("Missing qualified name"); } if (typeElement.getLocalName().startsWith("http:")){ return typeElement.getLocalName(); }else{ String typeElementNamespace = namespaceContext.getNamespaceURI(typeElement.getPrefix()); return typeElementNamespace+typeElement.getLocalName(); } } public Resource createTypedResource(String uriString, TypeElement typeElement) { if (typeElement == null) { throw exception("Missing qualified name"); } if (typeElement.getLocalName().startsWith("http:")){ String typeElementNamespace = ""; return model.createResource(uriString, model.createResource(typeElementNamespace + typeElement.getLocalName())); }else{ String typeElementNamespace = namespaceContext.getNamespaceURI(typeElement.getPrefix()); if(typeElementNamespace==null){ throw exception("The namespace with prefix \""+typeElement.getPrefix()+"\" has not been declared"); } return model.createResource(uriString, model.createResource(typeElementNamespace + typeElement.getLocalName())); } } /* Used for creating labels (rdfs:label or skos:label) */ public Property createProperty(TypeElement typeElement) { if (typeElement == null) { throw exception("Missing qualified name"); } if 
(typeElement.getLocalName().startsWith("http:")){ String typeElementNamespace = ""; return model.createProperty(typeElementNamespace, typeElement.getLocalName()); }else{ String typeElementNamespace = namespaceContext.getNamespaceURI(typeElement.getPrefix()); if(typeElementNamespace==null){ throw exception("The namespace with prefix \""+typeElement.getPrefix()+"\" has not been declared"); } return model.createProperty(typeElementNamespace, typeElement.getLocalName()); } } public Property createProperty(X3ML.Relationship relationship) { if (relationship == null) { throw exception("Missing qualified name"); } if (relationship.getLocalName().startsWith("http:")){ String propertyNamespace = ""; return model.createProperty(propertyNamespace, relationship.getLocalName()); }else if (relationship.getLocalName().equals("MERGE")){ return null; } else{ String propertyNamespace = namespaceContext.getNamespaceURI(relationship.getPrefix()); if(propertyNamespace==null){ throw exception("The namespace with prefix \""+relationship.getPrefix()+"\" has not been declared"); } return model.createProperty(propertyNamespace, relationship.getLocalName()); } } public Literal createLiteral(String value, String language) { return model.createLiteral(value, language); } public Literal createTypedLiteral(String value, TypeElement typeElement) { String literalNamespace = namespaceContext.getNamespaceURI(typeElement.getPrefix()); String typeUri = literalNamespace + typeElement.getLocalName(); if(literalNamespace == null) { //we have a fully qualified namespace (e.g. http://www.w3.org/2001/XMLSchema#dateTime) typeUri=typeElement.getLocalName(); } return model.createTypedLiteral(value, typeUri); } /** Exports the transformed contents of graph in XML abbreviated RDF format using the given output stream. 
* * @param out the output stream that will be used for exporting the transformed contents */ @Override public void writeXML(OutputStream out) { if(X3ML.RootElement.hasNamedGraphs){ this.updateNamedgraphRefs(XPathInput.entireInputExportedRefUri); this.writeQuads(out); }else{ model.write(out, Labels.OUTPUT_FORMAT_RDF_XML_ABBREV); } } private void updateNamedgraphRefs(String uri){ Iterator<Quad> qIter=quadGraph.find(Node.ANY, Node.ANY, Node.ANY, Node.ANY); while(qIter.hasNext()){ quadGraph.add(new ResourceImpl("http://default").asNode(), new ResourceImpl(uri).asNode(), new ResourceImpl("http://PX_is_refered_by").asNode(), new ResourceImpl(qIter.next().getGraph().getURI()).asNode()); } } /** Exports the transformed contents of graph in RDF/XML format using the given output stream. * * @param out the output stream that will be used for exporting the transformed contents */ public void writeXMLPlain(OutputStream out) { model.write(out, Labels.OUTPUT_FORMAT_RDF_XML); } /** Exports the transformed contents of graph in NTRIPLES format using the given output stream. * * @param out the output stream that will be used for exporting the transformed contents */ public void writeNTRIPLE(OutputStream out) { model.write(out, Labels.OUTPUT_FORMAT_NTRIPLE); } /** Exports the transformed contents of graph in TURTLE format using the given output stream. * * @param out the output stream that will be used for exporting the transformed contents */ public void writeTURTLE(OutputStream out) { model.write(out, Labels.OUTPUT_FORMAT_TURTLE); } /** Exports the transformed contents of graph in TURTLE format using the given output stream. * * @param out the output stream that will be used for exporting the transformed contents */ public void writeJsonLD(OutputStream out) { model.write(out, Labels.OUTPUT_FORMAT_JSON_LD); } /** Exports the transformed contents of the Jena model in the given output stream with respect to * the given format. 
Depending on the selected format the contents can be exported as triples or * as quads. More specifically, if namedgraphs have been used within the mappings, then the transformed * contents will be exported in TRIG format (even if the given format is different). * * @param out the output stream that will be used for exporting the transformed contents * @param format the export format. It can be any of the following: [application/rdf+xml, * application/rdf+xml_plain, * application/n-triples, * application/trig, * text/turtle] */ @Override public void write(OutputStream out, String format) { if(X3ML.RootElement.hasNamedGraphs){ //export quads if(!Labels.OUTPUT_MIME_TYPE_TRIG.equalsIgnoreCase(format)){ log.warn("Invalid mime type used for exporting quads."); File outputFileTrig=new File("output-"+System.currentTimeMillis()+"."+Labels.TRIG); log.warn("Exporting contents in TRIG format in file "+outputFileTrig); try{ writeQuads(new PrintStream(outputFileTrig)); }catch(FileNotFoundException ex){ throw exception("An error occurred while exporting Quads",ex); } }else{ writeQuads(out); } }else{ //export triples if (Labels.OUTPUT_MIME_TYPE_NTRIPLES.equalsIgnoreCase(format)) { writeNTRIPLE(out); } else if (Labels.OUTPUT_MIME_TYPE_TURTLE.equalsIgnoreCase(format)) { writeTURTLE(out); } else if (Labels.OUTPUT_MIME_TYPE_RDF_XML.equalsIgnoreCase(format)) { writeXML(out); } else if (Labels.OUTPUT_MIME_TYPE_RDF_XML_ABBREV.equalsIgnoreCase(format)){ writeXMLPlain(out); } else if (Labels.OUTPUT_MIME_TYPE_TRIG.equalsIgnoreCase(format)){ writeQuads(out); } else if (Labels.OUTPUT_MIME_TYPE_JSON_LD.equalsIgnoreCase(format)){ writeJsonLD(out); }else { writeXML(out); } } } /** Exports the transformed contents of graph as Quads using the given output stream. * The contents are exported in TRIG format. * This method is used when: (a) the mappings contain namedgraphs, (b) the user defined trig as the export format. 
* * @param out the output stream that will be used for exporting the transformed contents */ public void writeQuads(OutputStream out){ StmtIterator stIter=model.listStatements(); String defaultGraphSpace="http://default"; if(quadGraph.isEmpty()){ // No namedgraphs were used, so output everything from the triples model in the quadGraph to export it in TRIG format Node defgraph=new ResourceImpl(defaultGraphSpace).asNode(); while(stIter.hasNext()){ Statement st=stIter.next(); quadGraph.add(defgraph, st.getSubject().asNode(), st.getPredicate().asNode(), st.getObject().asNode()); } }else{ // There are namedgraphs, So export in the default graph only those triples that are not assigned any namedgraph Node defgraph=new ResourceImpl(defaultGraphSpace).asNode(); while(stIter.hasNext()){ Statement st=stIter.next(); if(!quadGraph.contains(null,st.getSubject().asNode(), st.getPredicate().asNode(), st.getObject().asNode())){ quadGraph.add(defgraph, st.getSubject().asNode(), st.getPredicate().asNode(), st.getObject().asNode()); } } } RDFDataMgr.write(out, quadGraph, Lang.TRIG); // or NQUADS } @Override public String[] toStringArray() { return toString().split("\n"); } @Override public String toString() { ByteArrayOutputStream baos = new ByteArrayOutputStream(); writeNTRIPLE(new PrintStream(baos)); return new String(baos.toByteArray()); } }
package org.apache.maven.plugins.site.run; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.maven.doxia.siterenderer.DocumentRenderer; import org.apache.maven.doxia.siterenderer.SiteRenderingContext; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.site.render.AbstractSiteRenderingMojo; import org.apache.maven.reporting.exec.MavenReportExecution; import org.codehaus.plexus.util.IOUtil; import org.mortbay.jetty.Connector; import org.mortbay.jetty.Handler; import org.mortbay.jetty.Server; import org.mortbay.jetty.handler.DefaultHandler; import org.mortbay.jetty.nio.SelectChannelConnector; import org.mortbay.jetty.webapp.WebAppContext; /** * Starts the site up, rendering documents as requested for faster editing. * It uses Jetty as the web server. 
* * @author <a href="mailto:brett@apache.org">Brett Porter</a> * @version $Id$ */ @Mojo( name = "run", aggregator = true, requiresReports = true ) public class SiteRunMojo extends AbstractSiteRenderingMojo { /** * Where to create the dummy web application. */ @Parameter( defaultValue = "${project.build.directory}/site-webapp" ) private File tempWebappDirectory; /** * The port to execute the HTTP server on. */ @Parameter( property = "port", defaultValue = "8080" ) private int port; private static final int MAX_IDLE_TIME = 30000; /** * @see org.apache.maven.plugin.AbstractMojo#execute() */ public void execute() throws MojoExecutionException, MojoFailureException { checkInputEncoding(); Server server = new Server(); server.setStopAtShutdown( true ); Connector defaultConnector = getDefaultConnector(); server.setConnectors( new Connector[] { defaultConnector } ); WebAppContext webapp = createWebApplication(); webapp.setServer( server ); DefaultHandler defaultHandler = new DefaultHandler(); defaultHandler.setServer( server ); Handler[] handlers = new Handler[2]; handlers[0] = webapp; handlers[1] = defaultHandler; server.setHandlers( handlers ); getLog().info( "Starting Jetty on http://localhost:" + port + "/" ); try { server.start(); } catch ( Exception e ) { throw new MojoExecutionException( "Error executing Jetty: " + e.getMessage(), e ); } // Watch it try { server.getThreadPool().join(); } catch ( InterruptedException e ) { getLog().warn( "Jetty was interrupted", e ); } } private WebAppContext createWebApplication() throws MojoExecutionException { File webXml = new File( tempWebappDirectory, "WEB-INF/web.xml" ); webXml.getParentFile().mkdirs(); InputStream inStream = null; FileOutputStream outStream = null; try { inStream = getClass().getResourceAsStream( "/run/web.xml" ); outStream = new FileOutputStream( webXml ); IOUtil.copy( inStream, outStream ); outStream.close(); outStream = null; inStream.close(); inStream = null; } catch ( FileNotFoundException e ) { throw 
new MojoExecutionException( "Unable to construct temporary webapp for running site", e ); } catch ( IOException e ) { throw new MojoExecutionException( "Unable to construct temporary webapp for running site", e ); } finally { IOUtil.close( outStream ); IOUtil.close( inStream ); } WebAppContext webapp = new WebAppContext(); webapp.setContextPath( "/" ); webapp.setResourceBase( tempWebappDirectory.getAbsolutePath() ); webapp.setAttribute( DoxiaFilter.SITE_RENDERER_KEY, siteRenderer ); webapp.getInitParams().put( "org.mortbay.jetty.servlet.Default.useFileMappedBuffer", "false" ); // For external reports project.getReporting().setOutputDirectory( tempWebappDirectory.getAbsolutePath() ); for ( MavenReportExecution mavenReportExecution : getReports() ) { mavenReportExecution.getMavenReport().setReportOutputDirectory( tempWebappDirectory ); } List<MavenReportExecution> reports = getReports(); // TODO: is it sane to call getReports() method a second time? List<Locale> localesList = getLocales(); webapp.setAttribute( DoxiaFilter.LOCALES_LIST_KEY, localesList ); // Default is first in the list Locale defaultLocale = localesList.get( 0 ); Locale.setDefault( defaultLocale ); try { Map<String, DoxiaBean> i18nDoxiaContexts = new HashMap<String, DoxiaBean>(); for ( Locale locale : localesList ) { SiteRenderingContext i18nContext = createSiteRenderingContext( locale ); i18nContext.setInputEncoding( getInputEncoding() ); i18nContext.setOutputEncoding( getOutputEncoding() ); SiteRenderingContext i18nGeneratedSiteContext = createSiteRenderingContext( locale ); i18nGeneratedSiteContext.setInputEncoding( getInputEncoding() ); i18nGeneratedSiteContext.setOutputEncoding( getOutputEncoding() ); i18nGeneratedSiteContext.getSiteDirectories().clear(); Map<String, DocumentRenderer> i18nDocuments = locateDocuments( i18nContext, reports, locale ); DoxiaBean doxiaBean; if ( defaultLocale.equals( locale ) ) { i18nGeneratedSiteContext.addSiteDirectory( generatedSiteDirectory ); doxiaBean = new 
DoxiaBean( i18nContext, i18nDocuments, i18nGeneratedSiteContext ); } else { i18nGeneratedSiteContext.addSiteDirectory( new File( generatedSiteDirectory, locale.getLanguage() ) ); doxiaBean = new DoxiaBean( i18nContext, i18nDocuments, i18nGeneratedSiteContext ); } i18nDoxiaContexts.put( locale.getLanguage(), doxiaBean ); if ( defaultLocale.equals( locale ) ) { i18nDoxiaContexts.put( "default", doxiaBean ); } if ( defaultLocale.equals( locale ) ) { siteRenderer.copyResources( i18nContext, tempWebappDirectory ); } else { siteRenderer.copyResources( i18nContext, new File( tempWebappDirectory, locale.getLanguage() ) ); } } webapp.setAttribute( DoxiaFilter.I18N_DOXIA_CONTEXTS_KEY, i18nDoxiaContexts ); } catch ( Exception e ) { throw new MojoExecutionException( "Unable to set up webapp", e ); } return webapp; } private Connector getDefaultConnector() { Connector connector = new SelectChannelConnector(); connector.setPort( port ); connector.setMaxIdleTime( MAX_IDLE_TIME ); return connector; } public void setTempWebappDirectory( File tempWebappDirectory ) { this.tempWebappDirectory = tempWebappDirectory; } public void setPort( int port ) { this.port = port; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.Rule;
import org.junit.rules.Timeout;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Random;

/**
 * Read/write tests for HDFS striped (erasure-coded) files, covering file sizes
 * from empty up to multiple block groups, with and without a DataNode failure,
 * plus WebHDFS access and concat behavior.
 */
public class TestWriteReadStripedFile {
  public static final Logger LOG =
      LoggerFactory.getLogger(TestWriteReadStripedFile.class);

  // RS(3,2) system erasure coding policy used by every test in this class.
  private final ErasureCodingPolicy ecPolicy =
      SystemErasureCodingPolicies.getByID(
          SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
  private final int cellSize = ecPolicy.getCellSize();
  private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
  private final short parityBlocks = (short) ecPolicy.getNumParityUnits();
  // one DataNode per data/parity unit so a full stripe can be placed
  private final int numDNs = dataBlocks + parityBlocks;
  private final int stripesPerBlock = 2;
  private final int blockSize = stripesPerBlock * cellSize;
  private final int blockGroupSize = blockSize * dataBlocks;

  private MiniDFSCluster cluster;
  private DistributedFileSystem fs;
  private Configuration conf = new HdfsConfiguration();

  static {
    // trace-level logging for the write path to aid debugging of failures
    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
  }

  @Rule
  public Timeout globalTimeout = new Timeout(300000);

  /** Starts a mini cluster and enables the EC policy on the /ec directory. */
  @Before
  public void setup() throws IOException {
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
    fs = cluster.getFileSystem();
    fs.enableErasureCodingPolicy(ecPolicy.getName());
    fs.mkdirs(new Path("/ec"));
    cluster.getFileSystem().getClient().setErasureCodingPolicy("/ec",
        ecPolicy.getName());
  }

  @After
  public void tearDown() throws IOException {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
  }

  // Each size-based test runs twice: once on a healthy cluster and once
  // with a DataNode stopped after the write (the "2" variant, true flag).

  @Test
  public void testFileEmpty() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/EmptyFile", 0);
    testOneFileUsingDFSStripedInputStream("/ec/EmptyFile2", 0, true);
  }

  @Test
  public void testFileSmallerThanOneCell1() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell", 1);
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell2", 1, true);
  }

  @Test
  public void testFileSmallerThanOneCell2() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell", cellSize - 1);
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell2", cellSize - 1,
        true);
  }

  @Test
  public void testFileEqualsWithOneCell() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneCell", cellSize);
    testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneCell2", cellSize, true);
  }

  @Test
  public void testFileSmallerThanOneStripe1() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe",
        cellSize * dataBlocks - 1);
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe2",
        cellSize * dataBlocks - 1, true);
  }

  @Test
  public void testFileSmallerThanOneStripe2() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe",
        cellSize + 123);
    testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe2",
        cellSize + 123, true);
  }

  @Test
  public void testFileEqualsWithOneStripe() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneStripe",
        cellSize * dataBlocks);
    testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneStripe2",
        cellSize * dataBlocks, true);
  }

  @Test
  public void testFileMoreThanOneStripe1() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe1",
        cellSize * dataBlocks + 123);
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe12",
        cellSize * dataBlocks + 123, true);
  }

  @Test
  public void testFileMoreThanOneStripe2() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe2",
        cellSize * dataBlocks + cellSize * dataBlocks + 123);
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe22",
        cellSize * dataBlocks + cellSize * dataBlocks + 123, true);
  }

  @Test
  public void testLessThanFullBlockGroup() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/LessThanFullBlockGroup",
        cellSize * dataBlocks * (stripesPerBlock - 1) + cellSize);
    testOneFileUsingDFSStripedInputStream("/ec/LessThanFullBlockGroup2",
        cellSize * dataBlocks * (stripesPerBlock - 1) + cellSize, true);
  }

  @Test
  public void testFileFullBlockGroup() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/FullBlockGroup",
        blockSize * dataBlocks);
    testOneFileUsingDFSStripedInputStream("/ec/FullBlockGroup2",
        blockSize * dataBlocks, true);
  }

  @Test
  public void testFileMoreThanABlockGroup1() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup1",
        blockSize * dataBlocks + 123);
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup12",
        blockSize * dataBlocks + 123, true);
  }

  @Test
  public void testFileMoreThanABlockGroup2() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup2",
        blockSize * dataBlocks + cellSize + 123);
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup22",
        blockSize * dataBlocks + cellSize + 123, true);
  }

  @Test
  public void testFileMoreThanABlockGroup3() throws Exception {
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup3",
        blockSize * dataBlocks * 3 + cellSize * dataBlocks
            + cellSize + 123);
    testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup32",
        blockSize * dataBlocks * 3 + cellSize * dataBlocks
            + cellSize + 123, true);
  }

  /** Convenience overload: read/write round trip without DataNode failure. */
  private void testOneFileUsingDFSStripedInputStream(String src, int fileLength)
      throws Exception {
    testOneFileUsingDFSStripedInputStream(src, fileLength, false);
  }

  /**
   * Writes {@code fileLength} generated bytes to {@code src}, optionally stops
   * one DataNode holding the first block group, then verifies the contents via
   * pread, stateful read (byte[] and ByteBuffer, small and large buffers) and
   * seek.
   */
  private void testOneFileUsingDFSStripedInputStream(String src, int fileLength,
      boolean withDataNodeFailure) throws Exception {
    final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
    Path srcPath = new Path(src);
    DFSTestUtil.writeFile(fs, srcPath, new String(expected));
    StripedFileTestUtil.waitBlockGroupsReported(fs, src);

    StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);

    if (withDataNodeFailure) {
      // always kills the DataNode serving index 1 of the first block group
      int dnIndex = 1; // TODO: StripedFileTestUtil.random.nextInt(dataBlocks);
      LOG.info("stop DataNode " + dnIndex);
      stopDataNode(srcPath, dnIndex);
    }

    byte[] smallBuf = new byte[1024];
    byte[] largeBuf = new byte[fileLength + 100];
    StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);

    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
        largeBuf);
    StripedFileTestUtil.verifySeek(fs, srcPath, fileLength, ecPolicy,
        blockGroupSize);
    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
        ByteBuffer.allocate(fileLength + 100));
    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
        smallBuf);
    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
        ByteBuffer.allocate(1024));
  }

  /**
   * Shuts down the DataNode at {@code failedDNIdx} within the block locations
   * of the first cell of {@code path}, matched by transfer port.
   */
  private void stopDataNode(Path path, int failedDNIdx)
      throws IOException {
    BlockLocation[] locs = fs.getFileBlockLocations(path, 0, cellSize);
    if (locs != null && locs.length > 0) {
      String name = (locs[0].getNames())[failedDNIdx];
      for (DataNode dn : cluster.getDataNodes()) {
        int port = dn.getXferPort();
        if (name.contains(Integer.toString(port))) {
          dn.shutdown();
          break;
        }
      }
    }
  }

  @Test
  public void testWriteReadUsingWebHdfs() throws Exception {
    int fileLength = blockSize * dataBlocks + cellSize + 123;

    final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
    // NOTE(review): local fs intentionally shadows the field — this test goes
    // through the WebHDFS filesystem, not the direct DFS client.
    FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
        WebHdfsConstants.WEBHDFS_SCHEME);
    Path srcPath = new Path("/testWriteReadUsingWebHdfs");
    DFSTestUtil.writeFile(fs, srcPath, new String(expected));

    StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);

    byte[] smallBuf = new byte[1024];
    byte[] largeBuf = new byte[fileLength + 100];
    StripedFileTestUtil
        .verifyPread(fs, srcPath, fileLength, expected, largeBuf, ecPolicy);

    StripedFileTestUtil
        .verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
    StripedFileTestUtil.verifySeek(fs, srcPath, fileLength, ecPolicy,
        blockGroupSize);
    StripedFileTestUtil
        .verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
    // webhdfs doesn't support bytebuffer read
  }

  @Test
  public void testConcat() throws Exception {
    final byte[] data = StripedFileTestUtil.generateBytes(blockSize *
        dataBlocks * 10 + 234);
    int totalLength = 0;

    Random r = new Random();
    Path target = new Path("/ec/testConcat_target");
    DFSTestUtil.writeFile(fs, target, Arrays.copyOfRange(data, 0, 123));
    totalLength += 123;

    int numFiles = 5;
    Path[] srcs = new Path[numFiles];
    for (int i = 0; i < numFiles; i++) {
      srcs[i] = new Path("/ec/testConcat_src_file_" + i);
      int srcLength = r.nextInt(blockSize * dataBlocks * 2) + 1;
      DFSTestUtil.writeFile(fs, srcs[i],
          Arrays.copyOfRange(data, totalLength, totalLength + srcLength));
      totalLength += srcLength;
    }

    fs.concat(target, srcs);
    StripedFileTestUtil.verifyStatefulRead(fs, target, totalLength,
        Arrays.copyOfRange(data, 0, totalLength), new byte[1024]);
  }

  @Test
  public void testConcatWithDifferentECPolicy() throws Exception {
    final byte[] data =
        StripedFileTestUtil.generateBytes(blockSize * dataBlocks);
    Path nonECFile = new Path("/non_ec_file");
    DFSTestUtil.writeFile(fs, nonECFile, data);
    // moving a replicated file under /ec keeps its (non-EC) policy
    Path target = new Path("/ec/non_ec_file");
    fs.rename(nonECFile, target);

    int numFiles = 2;
    Path[] srcs = new Path[numFiles];
    for (int i = 0; i < numFiles; i++) {
      srcs[i] = new Path("/ec/testConcat_src_file_"+i);
      DFSTestUtil.writeFile(fs, srcs[i], data);
    }
    try {
      fs.concat(target, srcs);
      Assert.fail("non-ec file shouldn't concat with ec file");
    } catch (RemoteException e){
      Assert.assertTrue(e.getMessage()
          .contains("have different erasure coding policy"));
    }
  }
}
/*********************************************************************
 Copyright 2014 the Flapi authors

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 ********************************************************************/

package unquietcode.tools.flapi.java;

import com.google.common.collect.ImmutableSet;
import unquietcode.tools.flapi.Pair;

import javax.lang.model.SourceVersion;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * A hand-rolled recursive-descent parser for a Java-like method signature
 * string (e.g. {@code "List<String> names(int count, String... extra)"}).
 * The parsed pieces (name, return type, parameters, varargs) are exposed as
 * immutable public fields.
 *
 * @author Ben Fagin
 * @version 03-03-2012
 */
public class MethodSignature implements Comparable<MethodSignature> {
	public final String methodName;
	public final JavaType returnType;
	public final List<Pair<JavaType, String>> params = new ArrayList<Pair<JavaType, String>>();
	public final String varargName;
	public final JavaType varargType;
	public final String originalSignature;

	// working state, only valid while the constructor is parsing
	private char[] signature;
	private int cur = 0;

	/**
	 * Parses the given signature string.
	 *
	 * @param methodSignature text of the form "[returnType] name(params...)"
	 * @throws ParseException if the text cannot be parsed
	 * @throws IllegalArgumentException if the text is null
	 */
	public MethodSignature(String methodSignature) throws ParseException {
		if (methodSignature == null) {
			throw new IllegalArgumentException("method signature cannot be null");
		}

		methodSignature = methodSignature.trim();
		originalSignature = methodSignature;
		signature = originalSignature.toCharArray();

		boolean seenVarargs = false;
		JavaType _varargType = null;
		String _varargName = null;
		JavaType _returnType;
		String _methodName;

		// RETURN TYPE
		_returnType = matchType();
		match(WS, -1);

		// METHOD NAME
		try {
			_methodName = matchIdentifier();
		} catch (ParseException ex) {
			// means the type was the method name, which means no return type
			_methodName = _returnType.typeName;
			_returnType = null;
		}
		match(WS, -1);

		// LPAREN
		match(LP, 1);
		match(WS, -1);

		// parameters
		while (true) {
			// PARAMETER TYPE
			JavaType pType;
			try {
				pType = matchType();
				match(WS, -1);
			} catch (ParseException ex) {
				break; // no params
			}

			// VARARGS ("..." between type and name)
			try {
				match(DOT, 3);
				match(WS, -1);
				seenVarargs = true;
			} catch (ParseException ex) {
				// nothing
			}

			String pName = matchIdentifier();
			match(WS, -1);

			// C-style trailing array brackets on the parameter name
			if (peek(LB, 1)) {
				if (pType.arrayDepth > 0) {
					throwGeneralException("duplicate array declaration");
				}

				int arrayDepth = matchArray();
				pType = new JavaType(pType, arrayDepth);
			}

			if (seenVarargs) {
				_varargType = pType;
				_varargName = pName;
				break; // varargs are always last
			} else {
				params.add(new Pair<JavaType, String>(pType, pName));
			}

			// COMMA
			if (peek(COMMA, 1)) {
				match(COMMA, 1);
				match(WS, -1);
			} else {
				match(WS, -1);
				break;
			}
		}

		// RPAREN
		match(RP, 1);

		// check for duplicate parameter names
		checkForDuplicateParameters();

		// set variables
		returnType = _returnType;
		methodName = _methodName;
		varargName = _varargName;
		varargType = _varargType;

		// clean up working set
		signature = null;
	}

	/** Throws if two parameters share the same name. */
	private void checkForDuplicateParameters() throws ParseException {
		Set<String> seen = new HashSet<String>();

		for (Pair<JavaType, String> param : params) {
			if (seen.contains(param.second)) {
				throw new ParseException("Duplicate parameter name: '"+param.second+"'.");
			} else {
				seen.add(param.second);
			}
		}
	}

	/** Matches a type: identifier, optional generics (with wildcards), optional array brackets. */
	private JavaType matchType() throws ParseException {
		String typeName;
		int arrayDepth = 0;
		List<JavaType> typeParameters = new ArrayList<JavaType>();

		match(WS, -1);
		typeName = matchIdentifier();
		match(WS, -1);

		if (peek(LB, 1)) {
			arrayDepth = matchArray();
		}

		if (peek(LAB, 1)) {
			if (arrayDepth > 0) {
				throwGeneralException("invalid array declaration");
			}

			match(LAB, 1);

			while (true) {
				match(WS, -1);

				// wildcard
				if (peek(QM, 1)) {
					match(QM, 1);
					typeParameters.add(JavaType.wildcard());
				}
				// normal
				else {
					typeParameters.add(matchType());
				}

				match(WS, -1);

				if (peek(COMMA, 1)) {
					match(COMMA, 1);
				} else {
					break;
				}
			}

			match(RAB, 1);
		}

		match(WS, -1);

		// array brackets may also follow the generic parameters
		if (peek(LB, 1)) {
			arrayDepth = matchArray();
		}

		return new JavaType(typeName, arrayDepth, typeParameters);
	}

	/** Matches one or more "[]" pairs and returns the nesting depth. */
	private int matchArray() throws ParseException {
		int depth = 0;
		match(WS, -1);

		while (peek(LB, 1)) {
			match(LB, 1);
			match(WS, -1);
			match(RB, 1);
			match(WS, -1);
			++depth;
		}

		return depth;
	}

	/** Matches a (possibly dotted) Java identifier; stops before a varargs "...". */
	private String matchIdentifier() throws ParseException {
		StringBuilder sb = new StringBuilder();
		match(WS, -1);
		sb.append(match(ID_START, 1)).append(match(ID, -1));

		while (true) {
			// vararg
			if (peek(DOT, 3)) {
				break;
			}

			// partial identifier
			if (peek(DOT, 1)) {
				match(DOT, 1);
				sb.append('.');
				sb.append(match(ID_START, 1)).append(match(ID, -1));
			} else {
				break;
			}
		}

		return sb.toString();
	}

	/**
	 * Consumes characters from the current position.
	 * A positive count requires exactly that many matching characters;
	 * a negative count consumes greedily (zero or more).
	 */
	private String match(Set<Character> chars, int count) throws ParseException {
		StringBuilder sb = new StringBuilder();

		while (count != 0) {
			if (cur >= signature.length) {
				throwGeneralException("Unexpected EOF.");
			}

			if (chars.contains(signature[cur])) {
				sb.append(signature[cur]);
			} else {
				if (count < 0) {
					break;
				} else {
					throwUnexpectedCharException(chars);
				}
			}

			++cur;
			--count;
		}

		return sb.toString();
	}

	/** Non-consuming lookahead; same count semantics as {@link #match}. */
	private boolean peek(Set<Character> chars, int count) {
		int lcur = cur;

		while (count != 0) {
			// FIX: guard against running past the end of the signature
			// (previously an ArrayIndexOutOfBoundsException could escape here)
			if (lcur >= signature.length) {
				return count < 0;
			}

			if (chars.contains(signature[lcur])) {
				// matched; keep scanning
			} else {
				if (count < 0) {
					break;
				} else {
					return false;
				}
			}

			++lcur;
			--count;
		}

		return true;
	}

	private void throwGeneralException(String message) throws ParseException {
		StringBuilder sb = new StringBuilder();
		sb.append(message)
		  .append(" (method signature is [ '").append(new String(signature)).append("' ])");

		throw new ParseException(sb.toString());
	}

	private void throwUnexpectedCharException(Set<Character> chars) throws ParseException {
		StringBuilder sb = new StringBuilder();
		boolean first = true;

		sb.append("Expected to find character in ");

		if (chars == ID_START) {
			sb.append("[A-Za-z_$]");      // FIX: ID_START contains no digits (hint was swapped with ID)
		} else if (chars == ID) {
			sb.append("[A-Za-z0-9_$]");   // FIX: ID additionally allows digits
		} else {
			sb.append("{");

			for (Character aChar : chars) {
				if (!first) {
					sb.append(", ");
				} else {
					first = false;
				}

				sb.append(aChar);
			}

			sb.append("}");
		}

		sb.append(" but was ");

		if (cur >= signature.length) {
			sb.append("empty");
		} else {
			sb.append("'").append(signature[cur]).append("'");
		}

		sb.append(" (method signature is [ '").append(new String(signature)).append("' ]).");
		throw new ParseException(sb.toString());
	}

	public static class ValidationException extends Exception {
		ValidationException(String message) {
			super(message);
		}
	}

	// single-character token classes used by the parser
	private static final Set<Character> LP = ImmutableSet.of('(');
	private static final Set<Character> RP = ImmutableSet.of(')');
	private static final Set<Character> WS = ImmutableSet.of('\t', ' ', '\n', '\r');
	private static final Set<Character> COMMA = ImmutableSet.of(',');
	private static final Set<Character> DOT = ImmutableSet.of('.');
	private static final Set<Character> LAB = ImmutableSet.of('<');
	private static final Set<Character> RAB = ImmutableSet.of('>');
	private static final Set<Character> LB = ImmutableSet.of('[');
	private static final Set<Character> RB = ImmutableSet.of(']');
	private static final Set<Character> QM = ImmutableSet.of('?');

	private static final Set<Character> ID_START = ImmutableSet.of(
		'A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q',
		'R','S','T','U','V','W','X','Y','Z','a','b','c','d','e','f','g','h',
		'i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y',
		'z','$','_'
	);

	private static final Set<Character> ID = ImmutableSet.<Character>builder()
		.addAll(ID_START)
		.add('0', '1', '2', '3', '4', '5', '6', '7', '8', '9')
		.build();

	//-------------------------------------------------------------------------------------------//

	/**
	 * Returns true if the Java compiler would treat the two signatures as the
	 * same method (same name, same effective parameter types with varargs
	 * normalized to arrays, and compatible return types).
	 */
	public boolean compilerEquivalent(MethodSignature other) {
		// same name
		if (!methodName.equals(other.methodName)) {
			return false;
		}

		// synthesize new parameter arrays, accounting for varargs
		List<Pair<JavaType, String>> newParams = new ArrayList<Pair<JavaType, String>>(params);
		if (varargType != null) {
			newParams.add(new Pair<JavaType, String>(new JavaType(varargType, 1), varargName));
		}

		List<Pair<JavaType, String>> newOtherParams = new ArrayList<Pair<JavaType, String>>(other.params);
		if (other.varargType != null) {
			newOtherParams.add(new Pair<JavaType, String>(new JavaType(other.varargType, 1), other.varargName));
		}

		// parameter size
		if (newParams.size() != newOtherParams.size()) {
			return false;
		}

		// parameter types
		for (int i=0; i < newParams.size(); ++i) {
			Pair<JavaType,String> p1 = newParams.get(i);
			Pair<JavaType,String> p2 = newOtherParams.get(i);

			if (!p1.first.compilerEquivalent(p2.first)) {
				return false;
			}
		}

		// and same return type (a missing return type is treated as void)
		if (returnType == null && other.returnType != null) {
			if (!other.returnType.typeName.equals("void")) {
				return false;
			}
		}

		if (returnType != null && other.returnType == null) {
			if (!returnType.typeName.equals("void")) {
				return false;
			}
		}

		if (returnType != null && other.returnType != null) {
			if (!returnType.typeName.equals(other.returnType.typeName)) {
				return false;
			}
		}

		// otherwise, equal (probably)
		return true;
	}

	/**
	 * Validates that the method name and every parameter name are legal Java
	 * identifiers.
	 *
	 * @throws ValidationException on the first invalid name found
	 */
	public void validate() throws ValidationException {

		// check that method name is legitimate
		if (!SourceVersion.isName(methodName)) {
			throw new ValidationException(
				"Invalid method name: '"+ originalSignature +"'."
			);
		}

		// check that parameters are also valid names
		for (Pair<JavaType, String> param : params) {
			if (!SourceVersion.isName(param.second)) {
				throw new ValidationException(
					"Invalid parameter name '"+param.second+"' in method '"+originalSignature+"'."
				);
			}
		}

		// check vararg name
		if (varargName != null) {
			if (!SourceVersion.isName(varargName)) {
				throw new ValidationException(
					"Invalid vararg parameter name '"+varargName+"' in method '"+originalSignature+"'."
				);
			}
		}
	}

	// return the number of parameters, accounting for varargs
	public int parameterCount() {
		return params.size() + (varargType != null ? 1 : 0);
	}

	@Override
	public String toString() {
		StringBuilder signature = new StringBuilder();

		// method return type
		if (returnType != null) {
			signature.append(returnType.toString()).append(" ");
		} else {
			signature.append("void ");
		}

		// method name
		signature.append(methodName).append("(");

		// parameters
		for (int i = 0; i < params.size(); i++) {
			Pair<JavaType, String> param = params.get(i);

			if (i != 0) {
				signature.append(", ");
			}

			// parameter type
			signature.append(param.first.toString());

			// parameter name
			signature.append(" ").append(param.second);
		}

		// vararg parameter
		if (varargType != null) {
			if (params.size() > 0) {
				signature.append(", ");
			}

			signature.append(varargType.toString()).append("...").append(varargName);
		}

		signature.append(")");
		return signature.toString();
	}

	@Override
	public int compareTo(MethodSignature other) {
		return this.toString().compareTo(other.toString());
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.marshal; import java.nio.ByteBuffer; import java.io.IOException; import java.util.List; import java.util.Iterator; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.cql3.CQL3Type; import org.apache.cassandra.cql3.ColumnSpecification; import org.apache.cassandra.cql3.Lists; import org.apache.cassandra.cql3.Maps; import org.apache.cassandra.cql3.Sets; import org.apache.cassandra.db.rows.Cell; import org.apache.cassandra.db.rows.CellPath; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.DataOutputPlus; import org.apache.cassandra.serializers.CollectionSerializer; import org.apache.cassandra.serializers.MarshalException; import org.apache.cassandra.utils.ByteBufferUtil; /** * The abstract validator that is the base for maps, sets and lists (both frozen and non-frozen). * * Please note that this comparator shouldn't be used "manually" (through thrift for instance). 
 */
public abstract class CollectionType<T> extends AbstractType<T>
{
    // Shared serializer for the cell paths of all collection columns.
    public static CellPath.Serializer cellPathSerializer = new CollectionPathSerializer();

    public enum Kind
    {
        MAP
        {
            public ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey)
            {
                return isKey ? Maps.keySpecOf(collection) : Maps.valueSpecOf(collection);
            }
        },
        SET
        {
            public ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey)
            {
                return Sets.valueSpecOf(collection);
            }
        },
        LIST
        {
            public ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey)
            {
                return Lists.valueSpecOf(collection);
            }
        };

        // Builds the bind-receiver spec for elements of this collection kind
        // (for maps, isKey selects key vs. value receiver).
        public abstract ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey);
    }

    public final Kind kind;

    protected CollectionType(ComparisonType comparisonType, Kind kind)
    {
        super(comparisonType);
        this.kind = kind;
    }

    // Comparator for the element names/keys of the collection.
    public abstract AbstractType<?> nameComparator();

    // Comparator for the element values of the collection.
    public abstract AbstractType<?> valueComparator();

    protected abstract List<ByteBuffer> serializedValues(Iterator<Cell> cells);

    @Override
    public abstract CollectionSerializer<T> getSerializer();

    public ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey)
    {
        return kind.makeCollectionReceiver(collection, isKey);
    }

    public String getString(ByteBuffer bytes)
    {
        return BytesType.instance.getString(bytes);
    }

    public ByteBuffer fromString(String source)
    {
        try
        {
            return ByteBufferUtil.hexToBytes(source);
        }
        catch (NumberFormatException e)
        {
            throw new MarshalException(String.format("cannot parse '%s' as hex bytes", source), e);
        }
    }

    public boolean isCollection()
    {
        return true;
    }

    @Override
    public void validateCellValue(ByteBuffer cellValue) throws MarshalException
    {
        // multi-cell collections validate each cell against the element type;
        // frozen collections validate the whole serialized blob
        if (isMultiCell())
            valueComparator().validateCellValue(cellValue);
        else
            super.validateCellValue(cellValue);
    }

    /**
     * Checks if this collection is Map.
     * @return <code>true</code> if this collection is a Map, <code>false</code> otherwise.
     */
    public boolean isMap()
    {
        return kind == Kind.MAP;
    }

    // Overrided by maps
    protected int collectionSize(List<ByteBuffer> values)
    {
        return values.size();
    }

    public ByteBuffer serializeForNativeProtocol(ColumnDefinition def, Iterator<Cell> cells, int version)
    {
        assert isMultiCell();
        List<ByteBuffer> values = serializedValues(cells);
        int size = collectionSize(values);
        return CollectionSerializer.pack(values, size, version);
    }

    @Override
    public boolean isCompatibleWith(AbstractType<?> previous)
    {
        if (this == previous)
            return true;

        if (!getClass().equals(previous.getClass()))
            return false;

        CollectionType tprev = (CollectionType) previous;
        if (this.isMultiCell() != tprev.isMultiCell())
            return false;

        // subclasses should handle compatibility checks for frozen collections
        if (!this.isMultiCell())
            return isCompatibleWithFrozen(tprev);

        if (!this.nameComparator().isCompatibleWith(tprev.nameComparator()))
            return false;

        // the value comparator is only used for Cell values, so sorting doesn't matter
        return this.valueComparator().isValueCompatibleWith(tprev.valueComparator());
    }

    @Override
    public boolean isValueCompatibleWithInternal(AbstractType<?> previous)
    {
        // for multi-cell collections, compatibility and value-compatibility are the same
        if (this.isMultiCell())
            return isCompatibleWith(previous);

        if (this == previous)
            return true;

        if (!getClass().equals(previous.getClass()))
            return false;

        CollectionType tprev = (CollectionType) previous;
        if (this.isMultiCell() != tprev.isMultiCell())
            return false;

        // subclasses should handle compatibility checks for frozen collections
        return isValueCompatibleWithFrozen(tprev);
    }

    /** A version of isCompatibleWith() to deal with non-multicell (frozen) collections */
    protected abstract boolean isCompatibleWithFrozen(CollectionType<?> previous);

    /** A version of isValueCompatibleWith() to deal with non-multicell (frozen) collections */
    protected abstract boolean isValueCompatibleWithFrozen(CollectionType<?> previous);

    public CQL3Type asCQL3Type()
    {
return new CQL3Type.Collection(this); } @Override public String toString() { return this.toString(false); } private static class CollectionPathSerializer implements CellPath.Serializer { public void serialize(CellPath path, DataOutputPlus out) throws IOException { ByteBufferUtil.writeWithVIntLength(path.get(0), out); } public CellPath deserialize(DataInputPlus in) throws IOException { return CellPath.create(ByteBufferUtil.readWithVIntLength(in)); } public long serializedSize(CellPath path) { return ByteBufferUtil.serializedSizeWithVIntLength(path.get(0)); } public void skip(DataInputPlus in) throws IOException { ByteBufferUtil.skipWithVIntLength(in); } } }
/* Copyright 2010-2022 BusinessCode GmbH, Germany Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package de.businesscode.bcdui.web.servlets; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringEscapeUtils; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.apache.shiro.SecurityUtils; import org.apache.shiro.UnavailableSecurityManagerException; import org.apache.shiro.subject.Subject; import de.businesscode.bcdui.logging.PageSqlLogger; import de.businesscode.bcdui.subjectsettings.SecurityHelper; import de.businesscode.bcdui.toolbox.Configuration; import de.businesscode.bcdui.toolbox.ServletUtils; import de.businesscode.bcdui.web.accessLogging.RequestHashGenerator; import 
de.businesscode.bcdui.web.i18n.I18n;
import de.businesscode.bcdui.web.taglib.webpage.Functions;
import de.businesscode.util.StandardNamespaceContext;

public class BCDUIConfig extends HttpServlet {

  private static final long serialVersionUID = 1L;

  /** Optional properties file steering client-side logging (keys: LEVEL, appender). */
  private static final String clientConfigFilePath="/WEB-INF/clientLog.properties";

  private final Logger log = LogManager.getLogger(this.getClass());
  private final Logger virtLoggerPage = LogManager.getLogger("de.businesscode.bcdui.logging.virtlogger.page");

  /** Raw JSON loaded once in init() from /bcdui/conf/settings.json; emitted verbatim as bcdui.config.settings. */
  private String configJson;

  public final static String LIB_ROOT_FOLDER_NAME="bcdui";

  /**
   * Writes the client bootstrap script: the bcdui.config object (context path, module versions,
   * authentication state and rights, session hash, i18n, client log settings, page hash), the
   * XML namespace constants, and - unless bcduiConfigOnly=true - a document.write for the loader.
   *
   * @param request current request
   * @param response receives the generated JavaScript
   */
  @Override
  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    response.setContentType(getServletContext().getMimeType(".js"));

    // Optional client logging configuration
    URL realPath = getServletContext().getResource(clientConfigFilePath);
    Properties properties = new Properties();
    if( realPath != null && realPath.getPath() != null ) {
      File propFile = new File(realPath.getPath());
      if (propFile.canRead()) {
        // try-with-resources: the stream was previously leaked
        try (FileInputStream propIn = new FileInputStream(propFile)) {
          properties.load(propIn);
        }
      }
    }

    boolean isDebug = ServletUtils.getInstance().isFeDebug(request);

    // Best-effort read; the parameter may be absent, in which case FALSE is kept
    Boolean environmentValue = Boolean.FALSE;
    try {environmentValue = (Boolean)Configuration.getInstance().getConfigurationParameter("bcdui/serverHasRequestUrlLimit");}catch(Exception e) {}

    PrintWriter writer = new PrintWriter(response.getWriter());
    writer.println("var bcdui = bcdui || {};");
    writer.println("bcdui.core = bcdui.core || {};");
    writer.println("bcdui.config = {");
    writeClientParams(writer);
    writer.println(" contextPath: \"" + getServletContext().getContextPath() + "\"");

    // write version info
    writer.println(" , ceVersion: \"" + getVersion("BCD-UI") + "\"");
    writer.println(" , eeVersion: \"" + getVersion("BCD-UI-EE") + "\"");
    writer.println(" , deVersion: \"" + getVersion("BCD-UI-DE") + "\"");

    // IIS has a limit also for http request URLs, i.e. data requests
    writer.println(" , serverHasRequestUrlLimit: " + environmentValue.toString());

    // write authenticate information
    try {
      final Subject subject = SecurityUtils.getSubject();
      if(subject.isAuthenticated() ) {
        String userLogin = SecurityHelper.getUserLogin(subject);
        userLogin = (userLogin == null) ? "null" : "'" + StringEscapeUtils.escapeJavaScript(userLogin) + "'";
        String userId = SecurityHelper.getUserId(subject);
        userId = (userId == null) ? "null" : "'" + StringEscapeUtils.escapeJavaScript(userId) + "'";
        writer.println(" , isAuthenticated: true");
        writer.println(" , userName: " + userLogin ); // js null or js string with name; backwards compatible (in future may be removed; is to be replaced by .userLogin)
        writer.println(" , userLogin: " + userLogin ); // js null or js string with user login;
        writer.println(" , userId: " + userId ); // js null or js string with user id;

        // write userRoles
        writer.println(" , userRoles : {");
        writer.print(SecurityHelper.getRoles(subject).stream().map(s->{
          return "\"" + StringEscapeUtils.escapeJavaScript(s) + "\" : 1"; // define property as true to enable lookup w/o .hasOwnProperty()
        }).collect(Collectors.joining(",")));
        writer.println(" }");
      }

      // write bcdClient security settings as bcdui.config.clientRights object values
      writer.println(" , clientRights: {");

      // get bcdClient permissions once via subjectPreferences (so you directly got values on very 1st request)
      // and once via SecurityHelper use HashSet to avoid duplicates (after 1st request)
      HashSet<String> clientSubjectPreferences = new HashSet<>(SubjectPreferences.getPermissionList("bcdClient:", true));
      HashSet<String> clientPermissions = subject.isAuthenticated()
          ? new HashSet<>(SecurityHelper.getPermissions(subject, "bcdClient"))
          : new HashSet<>();
      clientPermissions.addAll(clientSubjectPreferences);

      // sort so equal right names are adjacent and can be grouped into one JS array
      ArrayList<String> sortedPerms = new ArrayList<>(clientPermissions);
      Collections.sort(sortedPerms);

      if (! sortedPerms.isEmpty()) {
        boolean onceInner = true;
        boolean onceOuter = true;
        String lastRight = "";
        for (String s : sortedPerms) {
          // permission format: "right:value"; bare "right" yields an empty value
          int x = s.indexOf(":");
          String right = (x != -1 ? s.substring(0, x) : s).trim();
          String value = (x != -1 ? s.substring(x + 1) : "").trim();
          boolean isBoolean = "true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value);
          boolean isInteger = false;
          try { Integer.parseInt(value); isInteger = true; } catch (Exception e) {}
          if (! right.isEmpty()) {
            if (lastRight.isEmpty()) {
              writer.println((onceOuter ? "" : ",") + right + ": [");
              onceOuter = false;
            }
            else if (!lastRight.equals(right)) {
              writer.println("]");
              writer.println("," + right + ": [");
              onceInner = true;
            }
            // numbers/booleans unquoted, everything else as JS string
            writer.println((onceInner ? "" : ",") + (isInteger || isBoolean ? ( "" + value.toLowerCase() + "") : ( "\"" + value + "\"")));
            onceInner = false;
            lastRight = right;
          }
        }
        if (! onceOuter)
          writer.println("]");
      }
      writer.println("}");
    }
    catch (UnavailableSecurityManagerException e) {
      // don't use shiro at all?
      writer.println(" , isAuthenticated: false");
      writer.println(" , userName: null");
      writer.println(" , clientRights:{}");
    }

    writer.println(" , sessionHash: \"" + ( getSessionHash(request) ) + "\"");
    writer.println(" , i18n: { \"langSubjectFilterName\":\"" + I18n.SUBJECT_FILTER_TYPE + "\", \"lang\" : \"" + getLang(request) + "\"}");
    writer.println(" , debug: " + isDebug);
    writer.println(" , isDebug: " + isDebug);
    writer.println(" , libPath: \"" + getServletContext().getContextPath() + "/"+LIB_ROOT_FOLDER_NAME+"/\"");
    writer.println(" , jsLibPath: \"" + getServletContext().getContextPath() + "/"+LIB_ROOT_FOLDER_NAME+"/js/\"");

    if(! properties.isEmpty()){
      if(properties.getProperty("LEVEL") != null)
        writer.println(" , clientLogLevel: \"" + properties.getProperty("LEVEL").trim()+"\"");
      if(properties.getProperty("appender") != null)
        writer.println(" , clientLogAppenderJSClassName: \"" + properties.getProperty("appender").trim()+"\"");
    }

    // generate unique pageHash
    String pageHash = RequestHashGenerator.generatePageHash(request);
    writer.println(" , frame: { pageHash: \"" + pageHash + "\" }");

    // App-wide config from /bcdui/conf/configuration.json
    if( configJson!=null && !configJson.isEmpty() )
      writer.println(" , settings: " + configJson );

    writer.println("};");
    writer.println("");
    writer.println("bcdui.core.xmlConstants = {");
    writer.println(StandardNamespaceContext.getInstance().getAsJs());
    writer.println("};");

    if( ! "true".equals( request.getParameter("bcduiConfigOnly") ) )
      writer.println("document.write(\"<script type='text/javascript' src='" + request.getContextPath() + response.encodeURL("/bcdui/js/bcduiLoader.js") + "'></script>\");");

    // request cannot be null here (dereferenced above); only the session needs guarding
    HttpSession session = request.getSession(false);
    String sessionId = (session != null ? session.getId() : "");
    if( log.isDebugEnabled() )
      log.debug("PageHash "+pageHash+" for "+request.getHeader("Referer")+", "+sessionId);

    // log page
    if(PageSqlLogger.getInstance().isEnabled()) {
      final PageSqlLogger.LogRecord logRecord = new PageSqlLogger.LogRecord(sessionId, request.getHeader("Referer"), pageHash);
      virtLoggerPage.info(logRecord); // was level DEBUG
    }
  }

  /**
   * @param request
   * @return a language currently active, is never null.
   */
  private String getLang(HttpServletRequest request) {
    return I18n.getUserLocale(request).getLanguage();
  }

  /**
   * returns session hash (SHA-1) because we want to tag session on the client,
   * the hashing algorithm may change at any time, no one should rely on it.
   *
   * @param request
   * @return hash or empty String
   */
  private synchronized String getSessionHash(HttpServletRequest request) {
    HttpSession session = request.getSession(false);
    if(session == null){
      return "";
    }
    // cache due to expensive generation
    String hash = (String)session.getAttribute("bcdSessionHash");
    if(hash == null) {
      hash = DigestUtils.sha1Hex( session.getId() );
      session.setAttribute("bcdSessionHash", hash);
    }
    return hash;
  }

  /*
   * write comma separated parameters for client
   */
  private void writeClientParams(PrintWriter writer) {
    final HashMap<String, Object> clientParams = Configuration.getInstance().getClientParameters();
    if(clientParams != null && clientParams.size()>0){
      StringBuilder sb = new StringBuilder();
      for(Map.Entry<String, Object> param : clientParams.entrySet()){
        sb.append(Functions.jsString(param.getKey())).append(" : ").append(Functions.jsString(param.getValue())).append(",").append(System.getProperty("line.separator"));
      }
      writer.append(sb.toString());
    }
  }

  @Override
  public void init(ServletConfig config) throws ServletException {
    super.init(config);

    // If present, load app-wide configuration
    InputStream confIs = getServletContext().getResourceAsStream("/bcdui/conf/settings.json");
    if( confIs != null ) {
      // try-with-resources: previously the stream leaked when toString() threw
      try (InputStream is = confIs) {
        configJson = IOUtils.toString(is, "UTF-8");
      } catch (IOException e) {
        throw new ServletException(e);
      }
    }
  }

  /**
   * Reads build-time version info from META-INF/gitInformation/&lt;module&gt;_info.txt.
   *
   * @param moduleName module key, e.g. "BCD-UI"
   * @return "version branch [commit]", or an empty string when unavailable
   */
  private static String getVersion(String moduleName) {
    try {
      Enumeration<URL> resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/gitInformation/" + moduleName + "_info.txt");
      if (resources.hasMoreElements()) {
        // try-with-resources: the reader previously leaked when readLine() threw
        try (BufferedReader br = new BufferedReader(new InputStreamReader(resources.nextElement().openStream()))) {
          String commitHash = "";
          String branchName = "";
          String versionName = "";
          final String branch = "Branch:";
          final String commit = "Commit:";
          final String version = "Version:";
          String line = br.readLine();
          while (line != null) {
            if (line.contains(branch)) branchName = line.substring(branch.length() + line.indexOf(branch)).trim();
            if (line.contains(commit)) commitHash = line.substring(commit.length() + line.indexOf(commit)).trim();
            if (line.contains(version)) versionName = line.substring(version.length() + line.indexOf(version)).trim();
            line = br.readLine();
          }
          return versionName + " " + branchName + " [" + commitHash + "]";
        }
      }
    } catch (Exception e) { /* ignore: version info is cosmetic */ }
    return "";
  }
}
/* * (c) Copyright 2021 Palantir Technologies Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.lock.client; import com.google.common.annotations.VisibleForTesting; import com.palantir.atlasdb.correctness.TimestampCorrectnessMetrics; import com.palantir.atlasdb.timelock.api.ConjureGetFreshTimestampsRequest; import com.palantir.atlasdb.timelock.api.ConjureGetFreshTimestampsResponse; import com.palantir.atlasdb.timelock.api.ConjureLockRequest; import com.palantir.atlasdb.timelock.api.ConjureLockResponse; import com.palantir.atlasdb.timelock.api.ConjureRefreshLocksRequest; import com.palantir.atlasdb.timelock.api.ConjureRefreshLocksResponse; import com.palantir.atlasdb.timelock.api.ConjureStartTransactionsRequest; import com.palantir.atlasdb.timelock.api.ConjureStartTransactionsResponse; import com.palantir.atlasdb.timelock.api.ConjureUnlockRequest; import com.palantir.atlasdb.timelock.api.ConjureUnlockResponse; import com.palantir.atlasdb.timelock.api.ConjureWaitForLocksResponse; import com.palantir.atlasdb.timelock.api.GetCommitTimestampsRequest; import com.palantir.atlasdb.timelock.api.GetCommitTimestampsResponse; import com.palantir.lock.v2.LeaderTime; import com.palantir.logsafe.SafeArg; import com.palantir.logsafe.exceptions.SafeRuntimeException; import com.palantir.logsafe.logger.SafeLogger; import com.palantir.logsafe.logger.SafeLoggerFactory; import com.palantir.tritium.metrics.registry.TaggedMetricRegistry; import 
java.time.Instant;

import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import java.util.function.ToLongFunction;
import java.util.stream.Stream;
import org.immutables.value.Value;

/**
 * Decorates a {@link NamespacedConjureTimelockService} and corroborates the timestamps it hands
 * out: each timestamp-issuing call must return values strictly greater than the upper bound of
 * every previously observed batch. A violation indicates clocks went backwards; the configured
 * callback (a metric increment in production) runs and a {@link SafeRuntimeException} is thrown.
 */
public final class TimestampCorroboratingTimelockService implements NamespacedConjureTimelockService {
    private static final SafeLogger log = SafeLoggerFactory.get(TimestampCorroboratingTimelockService.class);
    private static final String CLOCKS_WENT_BACKWARDS_MESSAGE = "It appears that clocks went backwards!";

    private final Runnable timestampViolationCallback;
    private final NamespacedConjureTimelockService delegate;

    // Bounds are tracked per operation type so a violation report can show which kind of
    // request produced the conflicting bound.
    private final AtomicReference<TimestampBoundsRecord> lowerBoundFromFreshTimestamps =
            new AtomicReference<>(getDefaultTimestampBoundsRecord(OperationType.FRESH_TIMESTAMP));
    private final AtomicReference<TimestampBoundsRecord> lowerBoundFromCommitTimestamps =
            new AtomicReference<>(getDefaultTimestampBoundsRecord(OperationType.COMMIT_TIMESTAMP));
    private final AtomicReference<TimestampBoundsRecord> lowerBoundFromTransaction =
            new AtomicReference<>(getDefaultTimestampBoundsRecord(OperationType.TRANSACTION));

    @VisibleForTesting
    TimestampCorroboratingTimelockService(
            Runnable timestampViolationCallback, NamespacedConjureTimelockService delegate) {
        this.timestampViolationCallback = timestampViolationCallback;
        this.delegate = delegate;
    }

    public static NamespacedConjureTimelockService create(
            String userNamespace,
            TaggedMetricRegistry taggedMetricRegistry,
            NamespacedConjureTimelockService delegate) {
        return new TimestampCorroboratingTimelockService(
                () -> TimestampCorrectnessMetrics.of(taggedMetricRegistry)
                        .timestampsGoingBackwards(userNamespace)
                        .inc(),
                delegate);
    }

    // Lock operations hand out no timestamps and pass straight through.
    @Override
    public ConjureUnlockResponse unlock(ConjureUnlockRequest request) {
        return delegate.unlock(request);
    }

    @Override
    public ConjureRefreshLocksResponse refreshLocks(ConjureRefreshLocksRequest request) {
        return delegate.refreshLocks(request);
    }

    @Override
    public ConjureWaitForLocksResponse waitForLocks(ConjureLockRequest request) {
        return delegate.waitForLocks(request);
    }

    @Override
    public ConjureLockResponse lock(ConjureLockRequest request) {
        return delegate.lock(request);
    }

    @Override
    public LeaderTime leaderTime() {
        return delegate.leaderTime();
    }

    @Override
    public GetCommitTimestampsResponse getCommitTimestamps(GetCommitTimestampsRequest request) {
        return checkAndUpdateLowerBound(
                () -> delegate.getCommitTimestamps(request),
                GetCommitTimestampsResponse::getInclusiveLower,
                GetCommitTimestampsResponse::getInclusiveUpper,
                OperationType.COMMIT_TIMESTAMP);
    }

    @Override
    public ConjureGetFreshTimestampsResponse getFreshTimestamps(ConjureGetFreshTimestampsRequest request) {
        return checkAndUpdateLowerBound(
                () -> delegate.getFreshTimestamps(request),
                ConjureGetFreshTimestampsResponse::getInclusiveLower,
                ConjureGetFreshTimestampsResponse::getInclusiveUpper,
                OperationType.FRESH_TIMESTAMP);
    }

    @Override
    public ConjureStartTransactionsResponse startTransactions(ConjureStartTransactionsRequest request) {
        return checkAndUpdateLowerBound(
                () -> delegate.startTransactions(request),
                r -> r.getTimestamps().start(),
                // last timestamp of the partitioned batch: start + (count - 1) * interval
                r -> r.getTimestamps().start() + ((r.getTimestamps().count() - 1L) * r.getTimestamps().interval()),
                OperationType.TRANSACTION);
    }

    /**
     * Runs the delegate call, verifies the returned batch lies strictly above all previously
     * recorded bounds, and records the new upper bound for future checks.
     *
     * @param timestampContainerSupplier the delegate invocation
     * @param lowerBoundExtractor extracts the batch's inclusive lower bound
     * @param upperBoundExtractor extracts the batch's inclusive upper bound
     * @param operationType which per-operation bound to update
     */
    private <T> T checkAndUpdateLowerBound(
            Supplier<T> timestampContainerSupplier,
            ToLongFunction<T> lowerBoundExtractor,
            ToLongFunction<T> upperBoundExtractor,
            OperationType operationType) {
        // take snapshot before making the request
        Instant wallClockTimeBeforeRequest = Instant.now();
        TimestampBounds timestampBounds = getTimestampBounds();
        T timestampContainer = timestampContainerSupplier.get();
        // take snapshot after making the request
        Instant wallClockTimeAfterRequest = Instant.now();

        long lowerFreshTimestamp = lowerBoundExtractor.applyAsLong(timestampContainer);
        long upperFreshTimestamp = upperBoundExtractor.applyAsLong(timestampContainer);
        TimestampBoundsRecord currentTimestampsBoundsRecord = ImmutableTimestampBoundsRecord.builder()
                .operationType(operationType)
                .inclusiveLowerBoundFromLastRequest(lowerFreshTimestamp)
                .inclusiveUpperBoundFromLastRequest(upperFreshTimestamp)
                .wallClockTimeBeforeRequest(wallClockTimeBeforeRequest)
                .wallClockTimeAfterResponse(wallClockTimeAfterRequest)
                .build();
        checkTimestamp(timestampBounds, currentTimestampsBoundsRecord, lowerFreshTimestamp);
        updateLowerBound(currentTimestampsBoundsRecord);
        return timestampContainer;
    }

    @VisibleForTesting
    TimestampBounds getTimestampBounds() {
        return ImmutableTimestampBounds.builder()
                .boundFromFreshTimestamps(lowerBoundFromFreshTimestamps.get())
                .boundFromCommitTimestamps(lowerBoundFromCommitTimestamps.get())
                .boundFromTransactions(lowerBoundFromTransaction.get())
                .build();
    }

    // A fresh batch must start strictly above every recorded bound; equality also counts
    // as a violation because bounds are inclusive upper bounds of earlier batches.
    private void checkTimestamp(
            TimestampBounds bounds, TimestampBoundsRecord currentBoundsRecord, long lowerFreshTimestamp) {
        if (lowerFreshTimestamp <= bounds.getMaxLowerBound()) {
            timestampViolationCallback.run();
            throw clocksWentBackwards(bounds, currentBoundsRecord);
        }
    }

    private static RuntimeException clocksWentBackwards(
            TimestampBounds bounds, TimestampBoundsRecord currentBoundsRecord) {
        RuntimeException runtimeException = new SafeRuntimeException(CLOCKS_WENT_BACKWARDS_MESSAGE);
        log.error(
                CLOCKS_WENT_BACKWARDS_MESSAGE + ": bounds were {}, operation {}, fresh timestamp of {}.",
                SafeArg.of("persistedBounds", bounds),
                SafeArg.of("boundsRecordForCurrentRequest", currentBoundsRecord),
                runtimeException);
        // Return rather than throw: the caller writes `throw clocksWentBackwards(...)`, so
        // throwing here made both the declared return type and the caller's throw misleading
        // dead code. The same exception instance is raised either way.
        return runtimeException;
    }

    private void updateLowerBound(TimestampBoundsRecord boundsRecord) {
        switch (boundsRecord.operationType()) {
            case FRESH_TIMESTAMP:
                lowerBoundFromFreshTimestamps.accumulateAndGet(boundsRecord, this::maxBoundsRecord);
                return;
            case COMMIT_TIMESTAMP:
                lowerBoundFromCommitTimestamps.accumulateAndGet(boundsRecord, this::maxBoundsRecord);
                return;
            case TRANSACTION:
                lowerBoundFromTransaction.accumulateAndGet(boundsRecord, this::maxBoundsRecord);
                return;
        }
    }

    // Monotone merge for accumulateAndGet: keep whichever record has the larger bound.
    private TimestampBoundsRecord maxBoundsRecord(TimestampBoundsRecord prev, TimestampBoundsRecord newRecord) {
        return prev.lowerBoundForNextRequest() > newRecord.lowerBoundForNextRequest() ? prev : newRecord;
    }

    @Value.Immutable
    interface TimestampBounds {
        @Value.Parameter
        TimestampBoundsRecord boundFromFreshTimestamps();

        @Value.Parameter
        TimestampBoundsRecord boundFromCommitTimestamps();

        @Value.Parameter
        TimestampBoundsRecord boundFromTransactions();

        default long getMaxLowerBound() {
            return Stream.of(boundFromTransactions(), boundFromCommitTimestamps(), boundFromFreshTimestamps())
                    .map(TimestampBoundsRecord::lowerBoundForNextRequest)
                    .reduce(Math::max)
                    .orElse(Long.MIN_VALUE);
        }
    }

    @Value.Immutable
    interface TimestampBoundsRecord {
        Optional<Long> inclusiveLowerBoundFromLastRequest();

        long inclusiveUpperBoundFromLastRequest();

        OperationType operationType();

        Optional<Instant> wallClockTimeBeforeRequest();

        Instant wallClockTimeAfterResponse();

        @Value.Derived
        default long lowerBoundForNextRequest() {
            return inclusiveUpperBoundFromLastRequest();
        }
    }

    // Sentinel record so the first real request always passes the strict-greater check.
    private static TimestampBoundsRecord getDefaultTimestampBoundsRecord(OperationType operationType) {
        return ImmutableTimestampBoundsRecord.builder()
                .operationType(operationType)
                .inclusiveUpperBoundFromLastRequest(Long.MIN_VALUE)
                .wallClockTimeAfterResponse(Instant.now())
                .build();
    }

    enum OperationType {
        FRESH_TIMESTAMP,
        COMMIT_TIMESTAMP,
        TRANSACTION;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.platform.compute; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteCompute; import org.apache.ignite.internal.IgniteComputeImpl; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.portable.PortableObjectImpl; import org.apache.ignite.internal.portable.PortableRawReaderEx; import org.apache.ignite.internal.portable.PortableRawWriterEx; import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget; import org.apache.ignite.internal.processors.platform.PlatformContext; import org.apache.ignite.internal.util.typedef.C1; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.portable.PortableObject; import static org.apache.ignite.internal.processors.task.GridTaskThreadContextKey.TC_SUBGRID; /** * Interop compute. 
 */
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored", "UnusedDeclaration"})
public class PlatformCompute extends PlatformAbstractTarget {
    /** Operation: execute a single closure routed by affinity key. */
    private static final int OP_AFFINITY = 1;

    /** Operation: broadcast closure(s) to the nodes of the projection. */
    private static final int OP_BROADCAST = 2;

    /** Operation: execute a Java task synchronously. */
    private static final int OP_EXEC = 3;

    /** Operation: execute a Java task asynchronously. */
    private static final int OP_EXEC_ASYNC = 4;

    /** Operation: execute closure(s) with load balancing. */
    private static final int OP_UNICAST = 5;

    /** Compute instance. */
    private final IgniteComputeImpl compute;

    /** Future for previous asynchronous operation. */
    protected ThreadLocal<IgniteFuture<?>> curFut = new ThreadLocal<>();

    /**
     * Constructor.
     *
     * @param platformCtx Context.
     * @param compute Compute instance.
     */
    public PlatformCompute(PlatformContext platformCtx, IgniteComputeImpl compute) {
        super(platformCtx);

        this.compute = compute;
    }

    /** {@inheritDoc} */
    @Override protected long processInStreamOutLong(int type, PortableRawReaderEx reader)
        throws IgniteCheckedException {
        // Dispatch on operation code; the first long in the stream is the native task pointer.
        switch (type) {
            case OP_UNICAST:
                processClosures(reader.readLong(), reader, false, false);

                return TRUE;

            case OP_BROADCAST:
                processClosures(reader.readLong(), reader, true, false);

                return TRUE;

            case OP_AFFINITY:
                processClosures(reader.readLong(), reader, false, true);

                return TRUE;

            default:
                return super.processInStreamOutLong(type, reader);
        }
    }

    /**
     * Process closure execution request.
     *
     * @param taskPtr Task pointer.
     * @param reader Reader.
     * @param broadcast broadcast flag.
     * @param affinity Affinity flag; only honored for a single closure ({@code size == 1}).
     */
    private void processClosures(long taskPtr, PortableRawReaderEx reader, boolean broadcast, boolean affinity) {
        PlatformAbstractTask task;

        int size = reader.readInt();

        if (size == 1) {
            // Single closure: a dedicated single-closure task per routing mode.
            if (broadcast) {
                PlatformBroadcastingSingleClosureTask task0 =
                    new PlatformBroadcastingSingleClosureTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                task = task0;
            }
            else if (affinity) {
                PlatformBalancingSingleClosureAffinityTask task0 =
                    new PlatformBalancingSingleClosureAffinityTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                // Affinity routing reads cache name and key after the job itself.
                task0.affinity(reader.readString(), reader.readObjectDetached(), platformCtx.kernalContext());

                task = task0;
            }
            else {
                PlatformBalancingSingleClosureTask task0 = new PlatformBalancingSingleClosureTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                task = task0;
            }
        }
        else {
            // Multiple closures: broadcast or balanced multi-closure task.
            if (broadcast)
                task = new PlatformBroadcastingMultiClosureTask(platformCtx, taskPtr);
            else
                task = new PlatformBalancingMultiClosureTask(platformCtx, taskPtr);

            Collection<PlatformJob> jobs = new ArrayList<>(size);

            for (int i = 0; i < size; i++)
                jobs.add(nextClosureJob(task, reader));

            if (broadcast)
                ((PlatformBroadcastingMultiClosureTask)task).jobs(jobs);
            else
                ((PlatformBalancingMultiClosureTask)task).jobs(jobs);
        }

        // Restrict execution to the current projection's nodes via thread context.
        platformCtx.kernalContext().task().setThreadContext(TC_SUBGRID, compute.clusterGroup().nodes());

        executeNative0(task);
    }

    /**
     * Read the next closure job from the reader.
     *
     * @param task Task.
     * @param reader Reader.
     * @return Closure job.
     */
    private PlatformJob nextClosureJob(PlatformAbstractTask task, PortableRawReaderEx reader) {
        return platformCtx.createClosureJob(task, reader.readLong(), reader.readObjectDetached());
    }

    /** {@inheritDoc} */
    @Override protected void processInStreamOutStream(int type, PortableRawReaderEx reader, PortableRawWriterEx writer)
        throws IgniteCheckedException {
        switch (type) {
            case OP_EXEC:
                writer.writeObjectDetached(executeJavaTask(reader, false));

                break;

            case OP_EXEC_ASYNC:
                writer.writeObjectDetached(executeJavaTask(reader, true));

                break;

            default:
                super.processInStreamOutStream(type, reader, writer);
        }
    }

    /**
     * Execute native full-fledged task.
     *
     * @param taskPtr Pointer to the task.
     * @param topVer Topology version.
     */
    public void executeNative(long taskPtr, long topVer) {
        final PlatformFullTask task = new PlatformFullTask(platformCtx, compute, taskPtr, topVer);

        executeNative0(task);
    }

    /**
     * Set "withTimeout" state.
     *
     * @param timeout Timeout (milliseconds).
     */
    public void withTimeout(long timeout) {
        compute.withTimeout(timeout);
    }

    /**
     * Set "withNoFailover" state.
     */
    public void withNoFailover() {
        compute.withNoFailover();
    }

    /** {@inheritDoc} */
    @Override protected IgniteFuture currentFuture() throws IgniteCheckedException {
        IgniteFuture<?> fut = curFut.get();

        if (fut == null)
            throw new IllegalStateException("Asynchronous operation not started.");

        return fut;
    }

    /**
     * Execute task.
     *
     * @param task Task.
     */
    private void executeNative0(final PlatformAbstractTask task) {
        IgniteInternalFuture fut = compute.executeAsync(task, null);

        fut.listen(new IgniteInClosure<IgniteInternalFuture>() {
            private static final long serialVersionUID = 0L;

            @Override public void apply(IgniteInternalFuture fut) {
                try {
                    fut.get();

                    // Completed successfully: notify the native side with no error.
                    task.onDone(null);
                }
                catch (IgniteCheckedException e) {
                    // Propagate the failure to the native side.
                    task.onDone(e);
                }
            }
        });
    }

    /**
     * Execute task taking arguments from the given reader.
     *
     * @param reader Reader.
     * @param async Whether to run asynchronously; if so the chained future is stored in {@link #curFut}
     *     and {@code null} is returned.
     * @return Task result.
     */
    protected Object executeJavaTask(PortableRawReaderEx reader, boolean async) {
        // Read order is fixed by the native protocol: name, keepPortable flag, argument, node IDs.
        String taskName = reader.readString();
        boolean keepPortable = reader.readBoolean();
        Object arg = reader.readObjectDetached();

        Collection<UUID> nodeIds = readNodeIds(reader);

        IgniteCompute compute0 = computeForTask(nodeIds);

        if (async)
            compute0 = compute0.withAsync();

        if (!keepPortable && arg instanceof PortableObjectImpl)
            arg = ((PortableObject)arg).deserialize();

        Object res = compute0.execute(taskName, arg);

        if (async) {
            curFut.set(compute0.future().chain(new C1<IgniteFuture, Object>() {
                private static final long serialVersionUID = 0L;

                @Override public Object apply(IgniteFuture fut) {
                    return toPortable(fut.get());
                }
            }));

            return null;
        }
        else
            return toPortable(res);
    }

    /**
     * Convert object to portable form.
     *
     * @param src Source object.
     * @return Result.
     */
    private Object toPortable(Object src) {
        return platformCtx.kernalContext().grid().portables().toPortable(src);
    }

    /**
     * Read node IDs.
     *
     * @param reader Reader.
     * @return Node IDs, or {@code null} when the stream carries no explicit node list.
     */
    protected Collection<UUID> readNodeIds(PortableRawReaderEx reader) {
        if (reader.readBoolean()) {
            int len = reader.readInt();

            List<UUID> res = new ArrayList<>(len);

            for (int i = 0; i < len; i++)
                res.add(reader.readUuid());

            return res;
        }
        else
            return null;
    }

    /**
     * Get compute object for the given node IDs.
     *
     * @param nodeIds Node IDs.
     * @return Compute object.
     */
    protected IgniteCompute computeForTask(Collection<UUID> nodeIds) {
        return nodeIds == null ? compute :
            platformCtx.kernalContext().grid().compute(compute.clusterGroup().forNodeIds(nodeIds));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource; import org.apache.hadoop.hive.ql.util.NullOrdering; import org.apache.hadoop.hive.serde.serdeConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.exec.tez.InterruptibleProcessing; import org.apache.hadoop.hive.ql.exec.tez.RecordSource; import org.apache.hadoop.hive.ql.exec.tez.TezContext; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.WritableComparator;

/*
 * With an aim to consolidate the join algorithms to either hash based joins (MapJoinOperator) or
 * sort-merge based joins, this operator is being introduced. This operator executes a sort-merge
 * based algorithm. It replaces both the JoinOperator and the SMBMapJoinOperator for the tez side of
 * things. It works in either the map phase or reduce phase.
 *
 * The basic algorithm is as follows:
 *
 * 1. The processOp receives a row from a "big" table.
 * 2. In order to process it, the operator does a fetch for rows from the other tables.
 * 3. Once we have a set of rows from the other tables (till we hit a new key), more rows are
 * brought in from the big table and a join is performed.
 */
public class CommonMergeJoinOperator extends AbstractMapJoinOperator<CommonMergeJoinDesc> implements
    Serializable {

  private static final long serialVersionUID = 1L;
  private boolean isBigTableWork;
  private static final Logger LOG = LoggerFactory.getLogger(CommonMergeJoinOperator.class.getName());

  // Per-alias key of the group currently being joined.
  transient List<Object>[] keyWritables;
  // Per-alias key of the next group (seen but not yet joined).
  transient List<Object>[] nextKeyWritables;
  // Per-alias buffered rows of the next key group.
  transient RowContainer<List<Object>>[] nextGroupStorage;
  // Per-alias buffered rows of the current key group (the join candidates).
  transient RowContainer<List<Object>>[] candidateStorage;
  // Per-alias rows shortcut out of the join because a filter marks them unmatched.
  transient RowContainer<List<Object>>[] unmatchedStorage;
  transient String[] tagToAlias;
  // fetchDone[pos]: no more input on that alias; foundNextKeyGroup[pos]: a row with a new key was seen.
  private transient boolean[] fetchDone;
  private transient boolean[] foundNextKeyGroup;
  transient boolean firstFetchHappened = false;
  transient boolean localWorkInited = false;
  transient boolean initDone = false;
  transient List<Object> otherKey = null;
  transient List<Object> values = null;
  // One record source per input alias; rows are pulled from these on demand.
  transient RecordSource[] sources;
  // Lazily-populated comparators, one per alias per key column.
  transient WritableComparator[][] keyComparators;

  transient List<Operator<?
extends OperatorDesc>> originalParents = new ArrayList<Operator<? extends OperatorDesc>>(); transient Set<Integer> fetchInputAtClose; // A field because we cannot multi-inherit. transient InterruptibleProcessing interruptChecker; transient NullOrdering nullOrdering; transient private boolean shortcutUnmatchedRows; /** Kryo ctor. */ protected CommonMergeJoinOperator() { super(); } public CommonMergeJoinOperator(CompilationOpContext ctx) { super(ctx); } @SuppressWarnings("unchecked") @Override public void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); firstFetchHappened = false; fetchInputAtClose = getFetchInputAtCloseList(); int maxAlias = 0; for (byte pos = 0; pos < order.length; pos++) { if (pos > maxAlias) { maxAlias = pos; } } maxAlias += 1; nextGroupStorage = new RowContainer[maxAlias]; candidateStorage = new RowContainer[maxAlias]; unmatchedStorage = new RowContainer[maxAlias]; keyWritables = new ArrayList[maxAlias]; nextKeyWritables = new ArrayList[maxAlias]; fetchDone = new boolean[maxAlias]; foundNextKeyGroup = new boolean[maxAlias]; keyComparators = new WritableComparator[maxAlias][]; for (Entry<Byte, List<ExprNodeDesc>> entry : conf.getKeys().entrySet()) { keyComparators[entry.getKey().intValue()] = new WritableComparator[entry.getValue().size()]; } int bucketSize; int oldVar = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEMAPJOINBUCKETCACHESIZE); shortcutUnmatchedRows = HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVE_JOIN_SHORTCUT_UNMATCHED_ROWS); if (oldVar != 100) { bucketSize = oldVar; } else { bucketSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESMBJOINCACHEROWS); } for (byte pos = 0; pos < order.length; pos++) { RowContainer<List<Object>> rc = JoinUtil.getRowContainer(hconf, rowContainerStandardObjectInspectors[pos], pos, bucketSize, spillTableDesc, conf, !hasFilter(pos), reporter); nextGroupStorage[pos] = rc; RowContainer<List<Object>> candidateRC = JoinUtil.getRowContainer(hconf, 
rowContainerStandardObjectInspectors[pos], pos, bucketSize, spillTableDesc, conf, !hasFilter(pos), reporter); candidateStorage[pos] = candidateRC; RowContainer<List<Object>> unmatchedRC = JoinUtil.getRowContainer(hconf, rowContainerStandardObjectInspectors[pos], pos, bucketSize, spillTableDesc, conf, !hasFilter(pos), reporter); unmatchedStorage[pos] = unmatchedRC; } for (byte pos = 0; pos < order.length; pos++) { if (pos != posBigTable) { if ((parentOperators != null) && !parentOperators.isEmpty() && (parentOperators.get(pos) instanceof TezDummyStoreOperator)) { TezDummyStoreOperator dummyStoreOp = (TezDummyStoreOperator) parentOperators.get(pos); fetchDone[pos] = dummyStoreOp.getFetchDone(); } else { fetchDone[pos] = false; } } foundNextKeyGroup[pos] = false; } sources = ((TezContext) MapredContext.get()).getRecordSources(); interruptChecker = new InterruptibleProcessing(); nullOrdering = NullOrdering.NULLS_FIRST; if (sources[0] instanceof ReduceRecordSource) { ReduceRecordSource reduceRecordSource = (ReduceRecordSource) sources[0]; if (reduceRecordSource.getKeyTableDesc() != null && reduceRecordSource.getKeyTableDesc().getProperties() != null) { String nullSortOrder = reduceRecordSource.getKeyTableDesc().getProperties() .getProperty(serdeConstants.SERIALIZATION_NULL_SORT_ORDER); if (nullOrdering != null && !nullSortOrder.isEmpty()) { nullOrdering = NullOrdering.fromSign(nullSortOrder.charAt(0)); } } nullOrdering = NullOrdering.defaultNullOrder(hconf); } if (parentOperators != null && !parentOperators.isEmpty()) { // Tell RecordSource to flush last record even if its a map side SMB. SMB expect its // parent group by operators to emit the record as and when aggregation is done. // In case of group by with FINAL/MERGE_PARTIAL mode, the records are expected to come // in a sorted order to group by operator and the group by operator is suppose to // emit the aggregated value to next node once a record with different value // is received. 
In case we dont flush here, the last aggregate value will not be // emitted as it will keep waiting for the next different record. for (RecordSource source : sources) { source.setFlushLastRecord(true); } } } /* * In case of outer joins, we need to push records through even if one of the sides is done * sending records. For e.g. In the case of full outer join, the right side needs to send in data * for the join even after the left side has completed sending all the records on its side. This * can be done once at initialize time and at close, these tags will still forward records until * they have no more to send. Also, subsequent joins need to fetch their data as well since * any join following the outer join could produce results with one of the outer sides depending on * the join condition. We could optimize for the case of inner joins in the future here. */ private Set<Integer> getFetchInputAtCloseList() { Set<Integer> retval = new TreeSet<Integer>(); for (JoinCondDesc joinCondDesc : conf.getConds()) { retval.add(joinCondDesc.getLeft()); retval.add(joinCondDesc.getRight()); } return retval; } @Override public void endGroup() throws HiveException { // we do not want the end group to cause a checkAndGenObject defaultEndGroup(); } @Override public void startGroup() throws HiveException { // we do not want the start group to clear the storage defaultStartGroup(); } /* * (non-Javadoc) * * @see org.apache.hadoop.hive.ql.exec.Operator#processOp(java.lang.Object, * int) this processor has a push-pull model. First call to this method is a * push but the rest is pulled until we run out of records. 
 */
  @Override
  public void process(Object row, int tag) throws HiveException {
    posBigTable = (byte) conf.getBigTablePosition();

    byte alias = (byte) tag;
    // Filtered/standardized copy of the incoming row's values.
    List<Object> value = getFilteredValue(alias, row);

    // Shortcut path: rows that can never match (per filter tags) bypass the merge machinery.
    if (isOuterJoinUnmatchedRow(tag, value)) {
      int type = condn[0].getType();
      // Only the "preserved" side of the outer join emits the unmatched row.
      if (tag == 0 && (type == JoinDesc.LEFT_OUTER_JOIN || type == JoinDesc.FULL_OUTER_JOIN)) {
        unmatchedStorage[tag].addRow(value);
      }
      if (tag == 1 && (type == JoinDesc.RIGHT_OUTER_JOIN || type == JoinDesc.FULL_OUTER_JOIN)) {
        unmatchedStorage[tag].addRow(value);
      }
      // Non-forced: only flushes once joinEmitInterval rows have accumulated.
      emitUnmatchedRows(tag, false);
      return;
    }

    // compute keys and values as StandardObjects
    List<Object> key = mergeJoinComputeKeys(row, alias);

    // Fetch the first group for all small table aliases.
    doFirstFetchIfNeeded();

    //have we reached a new key group?
    boolean nextKeyGroup = processKey(alias, key);
    addToAliasFilterTags(alias, value, nextKeyGroup);
    if (nextKeyGroup) {
      //assert this.nextGroupStorage[alias].size() == 0;
      this.nextGroupStorage[alias].addRow(value);
      foundNextKeyGroup[tag] = true;
      if (tag != posBigTable) {
        return;
      }
    } else {
      // Same key still: if the big table hit the emit interval, try to emit early to avoid spilling.
      if ((tag == posBigTable) && (candidateStorage[tag].rowCount() == joinEmitInterval)) {
        boolean canEmit = true;
        // We may emit early only if every small table is already positioned on this same key.
        for (byte i = 0; i < foundNextKeyGroup.length; i++) {
          if (i == posBigTable) {
            continue;
          }

          if (!foundNextKeyGroup[i]) {
            canEmit = false;
            break;
          }

          if (compareKeys(i, key, keyWritables[i]) != 0) {
            canEmit = false;
            break;
          }
        }
        // we can save ourselves from spilling once we have join emit interval worth of rows.
        if (canEmit) {
          LOG.info("We are emitting rows since we hit the join emit interval of " + joinEmitInterval);
          joinOneGroup(false);
          candidateStorage[tag].clearRows();
          storage[tag].clearRows();
        }
      }
    }

    reportProgress();
    numMapRowsRead++;

    // the big table has reached a new key group. try to let the small tables
    // catch up with the big table.
    if (nextKeyGroup) {
      assert tag == posBigTable;
      List<Byte> listOfFetchNeeded = null;
      do {
        listOfFetchNeeded = joinOneGroup();
        //jump out the loop if we need input from the big table
      } while (listOfFetchNeeded != null && listOfFetchNeeded.size() > 0
          && !listOfFetchNeeded.contains(this.posBigTable));
      return;
    }

    assert !nextKeyGroup;
    candidateStorage[tag].addRow(value);
  }

  /**
   * Flushes buffered unmatched rows for {@code tag}: the tag's side forwards its rows while every
   * other alias gets a dummy/empty relation, then the normal join-object generation runs.
   * When {@code force} is false, flushing only happens once joinEmitInterval rows accumulated.
   */
  private void emitUnmatchedRows(int tag, boolean force) throws HiveException {
    if (unmatchedStorage[tag].rowCount() == 0 ||
            (!force && unmatchedStorage[tag].rowCount() < joinEmitInterval)) {
      return;
    }

    for (byte i = 0; i < order.length; i++) {
      if (i == tag) {
        storage[i] = unmatchedStorage[i];
      } else {
        putDummyOrEmpty(i);
      }
    }
    checkAndGenObject();
    unmatchedStorage[tag].clearRows();
  }

  /**
   * Decides if the actual row must be an unmatched row.
   *
   * Unmatched rows are those which are not part of the inner-join.
   * The current implementation has issues processing filtered rows in FOJ conditions.
   * Putting them in a separate group also reduces processing done for them.
 */
  private boolean isOuterJoinUnmatchedRow(int tag, List<Object> value) {
    // Shortcut applies only to single-condition (two-table) joins and when enabled by config.
    if (!shortcutUnmatchedRows || condn.length != 1) {
      return false;
    }
    switch (condn[0].getType()) {
    case JoinDesc.INNER_JOIN:
    case JoinDesc.LEFT_OUTER_JOIN:
    case JoinDesc.RIGHT_OUTER_JOIN:
    case JoinDesc.FULL_OUTER_JOIN:
      break;
    default:
      return false;
    }
    if (hasFilter(tag)) {
      short filterTag = getFilterTag(value);
      // Filtered w.r.t. the other side (1 - tag) means this row cannot join.
      if (JoinUtil.isFiltered(filterTag, 1 - tag)) {
        return true;
      }
    }
    return false;
  }

  private List<Byte> joinOneGroup() throws HiveException {
    return joinOneGroup(true);
  }

  /**
   * Joins one key group: flushes pending unmatched rows, finds the smallest current key across
   * aliases and emits the join output for it. Returns the aliases whose candidate groups were
   * consumed and therefore need the next group fetched (when {@code clear} is set).
   */
  private List<Byte> joinOneGroup(boolean clear) throws HiveException {
    for (int pos = 0; pos < order.length; pos++) {
      emitUnmatchedRows(pos, true);
    }
    int[] smallestPos = findSmallestKey();
    List<Byte> listOfNeedFetchNext = null;
    if (smallestPos != null) {
      listOfNeedFetchNext = joinObject(smallestPos, clear);
      if ((listOfNeedFetchNext.size() > 0) && clear) {
        // listOfNeedFetchNext contains all tables that we have joined data in their
        // candidateStorage, and we need to clear candidate storage and promote their
        // nextGroupStorage to candidateStorage and fetch data until we reach a
        // new group.
        for (Byte b : listOfNeedFetchNext) {
          try {
            fetchNextGroup(b);
          } catch (Exception e) {
            throw new HiveException(e);
          }
        }
      }
    }
    return listOfNeedFetchNext;
  }

  /**
   * Emits the join result for the aliases holding the smallest key (smallestPos[i] <= 0);
   * aliases on a larger key contribute a dummy/empty relation instead.
   */
  private List<Byte> joinObject(int[] smallestPos, boolean clear) throws HiveException {
    List<Byte> needFetchList = new ArrayList<Byte>();

    byte index = (byte) (smallestPos.length - 1);
    for (; index >= 0; index--) {
      if (smallestPos[index] > 0 || keyWritables[index] == null) {
        // This alias' key is bigger than the smallest one (or absent): no rows from it.
        putDummyOrEmpty(index);
        continue;
      }

      storage[index] = candidateStorage[index];
      if (clear) {
        needFetchList.add(index);
      }
      if (smallestPos[index] < 0) {
        // Strictly smallest: everything before it is necessarily bigger.
        break;
      }
    }

    for (index--; index >= 0; index--) {
      putDummyOrEmpty(index);
    }

    checkAndGenObject();
    if (clear) {
      for (Byte pos : needFetchList) {
        this.candidateStorage[pos].clearRows();
        this.keyWritables[pos] = null;
      }
    }
    return needFetchList;
  }

  private void putDummyOrEmpty(Byte i) {
    // put an empty list or null
    if (noOuterJoin) {
      storage[i] = emptyList;
    } else {
      storage[i] = dummyObjVectors[i];
    }
  }

  /**
   * Compares the current keys of all aliases. result[pos] is -1 for the alias establishing a new
   * minimum, negative/zero/positive otherwise relative to the running minimum; returns null when
   * no alias has a current key.
   */
  private int[] findSmallestKey() {
    int[] result = new int[order.length];
    List<Object> smallestOne = null;

    for (byte pos = 0; pos < order.length; pos++) {
      List<Object> key = keyWritables[pos];
      if (key == null) {
        continue;
      }

      if (smallestOne == null) {
        smallestOne = key;
        result[pos] = -1;
        continue;
      }

      result[pos] = compareKeys(pos, key, smallestOne);
      if (result[pos] < 0) {
        smallestOne = key;
      }
    }
    return smallestOne == null ? null : result;
  }

  /**
   * Advances alias {@code t} to its next key group: promotes the buffered next group to the
   * current one and, for small tables, pulls rows until a new key group (or end of input).
   */
  private void fetchNextGroup(Byte t) throws HiveException {
    if (keyWritables[t] != null) {
      return; // First process the current key.
    }
    if (foundNextKeyGroup[t]) {
      // first promote the next group to be the current group if we reached a
      // new group in the previous fetch
      if (this.nextKeyWritables[t] != null) {
        promoteNextGroupToCandidate(t);
      } else {
        this.keyWritables[t] = null;
        this.candidateStorage[t] = null;
        this.nextGroupStorage[t] = null;
      }
      foundNextKeyGroup[t] = false;
    }
    // for the big table, we only need to promote the next group to the current group.
    if (t == posBigTable) {
      return;
    }

    // for tables other than the big table, we need to fetch more data until reach a new group or
    // done.
    interruptChecker.startAbortChecks(); // Reset the time, we only want to count it in the loop.
    while (!foundNextKeyGroup[t]) {
      if (fetchDone[t]) {
        break;
      }
      fetchOneRow(t);
      try {
        interruptChecker.addRowAndMaybeCheckAbort();
      } catch (InterruptedException e) {
        throw new HiveException(e);
      }
    }
    if (!foundNextKeyGroup[t] && fetchDone[t]) {
      // Input exhausted without a new group: there is no next key.
      this.nextKeyWritables[t] = null;
    }
  }

  @Override
  public void close(boolean abort) throws HiveException {
    // Drain any remaining buffered groups before the operator tree is closed.
    joinFinalLeftData(); // Do this WITHOUT checking for parents
    super.close(abort);
  }

  @Override
  public void closeOp(boolean abort) throws HiveException {
    super.closeOp(abort);

    // clean up
    LOG.debug("Cleaning up the operator state");
    for (int pos = 0; pos < order.length; pos++) {
      if (pos != posBigTable) {
        fetchDone[pos] = false;
      }
      foundNextKeyGroup[pos] = false;
    }
  }

  // Pushes one record from the alias' source; records whether that alias' input is exhausted.
  private void fetchOneRow(byte tag) throws HiveException {
    try {
      boolean hasMore = sources[tag].pushRecord();

      if (fetchDone[tag] && hasMore) {
        LOG.warn("fetchDone[" + tag + "] was set to true (by a recursive call) and will be reset");
      }// TODO: "else {"? This happened in the past due to a bug, see HIVE-11016.
      fetchDone[tag] = !hasMore;

      if (sources[tag].isGrouped()) {
        // instead of maintaining complex state for the fetch of the next group,
        // we know for sure that at the end of all the values for a given key,
        // we will definitely reach the next key group.
        foundNextKeyGroup[tag] = true;
      }
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }

  /**
   * Close-phase drain: joins whatever is still buffered once the big table's input has ended,
   * keeps pulling the small tables until all inputs are done, then flushes remaining cached groups.
   */
  private void joinFinalLeftData() throws HiveException {
    @SuppressWarnings("rawtypes")
    RowContainer bigTblRowContainer = this.candidateStorage[this.posBigTable];

    boolean allFetchDone = allFetchDone();
    // if all left data in small tables are less than and equal to the left data
    // in big table, let them catch up
    while (bigTblRowContainer != null && bigTblRowContainer.rowCount() > 0
        && !allFetchDone) {
      joinOneGroup();
      bigTblRowContainer = this.candidateStorage[this.posBigTable];
      allFetchDone = allFetchDone();
    }

    while (!allFetchDone) {
      List<Byte> ret = joinOneGroup();
      // if we are in close op phase, we have definitely exhausted the big table input
      fetchDone[posBigTable] = true;
      // First, handle the condition where the first fetch was never done (big table is empty).
      doFirstFetchIfNeeded();
      // in case of outer joins, we need to pull in records from the sides we still
      // need to produce output for apart from the big table. for e.g. full outer join
      // TODO: this reproduces the logic of the loop that was here before, assuming
      // firstFetchHappened == true. In reality it almost always calls joinOneGroup. Fix it?
      int lastPos = (fetchDone.length - 1);
      if (posBigTable != lastPos
          && (fetchInputAtClose.contains(lastPos)) && (fetchDone[lastPos] == false)) {
        // Do the join. It does fetching of next row groups itself.
        ret = joinOneGroup();
      }
      if (ret == null || ret.size() == 0) {
        break;
      }

      reportProgress();
      numMapRowsRead++;
      allFetchDone = allFetchDone();
    }

    boolean dataInCache = true;
    while (dataInCache) {
      for (byte pos = 0; pos < order.length; pos++) {
        if (this.foundNextKeyGroup[pos] && this.nextKeyWritables[pos] != null) {
          fetchNextGroup(pos);
        }
      }
      joinOneGroup();
      dataInCache = false;
      for (byte pos = 0; pos < order.length; pos++) {
        if (candidateStorage[pos] == null) {
          continue;
        }
        if (this.candidateStorage[pos].hasRows()) {
          dataInCache = true;
          break;
        }
      }
    }
  }

  // Performs the initial pull of one group from every small table, exactly once.
  private void doFirstFetchIfNeeded() throws HiveException {
    if (firstFetchHappened) {
      return;
    }
    firstFetchHappened = true;
    for (byte pos = 0; pos < order.length; pos++) {
      if (pos != posBigTable) {
        fetchNextGroup(pos);
      }
    }
  }

  // True when every small-table input is exhausted (the big table is not considered).
  private boolean allFetchDone() {
    for (byte pos = 0; pos < order.length; pos++) {
      if (pos != posBigTable && !fetchDone[pos]) {
        return false;
      }
    }
    return true;
  }

  // Swaps the next-group buffer into the candidate slot, recycling the old (cleared) container.
  private void promoteNextGroupToCandidate(Byte t) throws HiveException {
    this.keyWritables[t] = this.nextKeyWritables[t];
    this.nextKeyWritables[t] = null;
    RowContainer<List<Object>> oldRowContainer = this.candidateStorage[t];
    oldRowContainer.clearRows();
    this.candidateStorage[t] = this.nextGroupStorage[t];
    this.nextGroupStorage[t] = oldRowContainer;
  }

  /**
   * Registers {@code key} for {@code alias}; returns true iff it starts a new key group.
   */
  private boolean processKey(byte alias, List<Object> key) throws HiveException {
    List<Object> keyWritable = keyWritables[alias];
    if (keyWritable == null) {
      // the first group.
      keyWritables[alias] = key;
      keyComparators[alias] = new WritableComparator[key.size()];
      return false;
    } else {
      int cmp = compareKeys(alias, key, keyWritable);
      if (cmp != 0) {
        // Can't overwrite existing keys
        if (nextKeyWritables[alias] != null) {
          throw new HiveException("Attempting to overwrite nextKeyWritables[" + alias + "]");
        }
        nextKeyWritables[alias] = key;
        return true;
      }
      return false;
    }
  }

  /**
   * Compares two key tuples for {@code alias} column by column, using the lazily-created
   * per-column comparators. Keys of different arity compare by size.
   */
  @SuppressWarnings("rawtypes")
  private int compareKeys(byte alias, List<Object> k1, List<Object> k2) {
    final WritableComparator[] comparators = keyComparators[alias];

    // join keys have different sizes?
    if (k1.size() != k2.size()) {
      return k1.size() - k2.size();
    }

    if (comparators.length == 0) {
      // cross-product - no keys really
      return 0;
    }

    if (comparators.length > 1) {
      // rare case
      return compareKeysMany(comparators, k1, k2);
    } else {
      return compareKey(comparators, 0,
              k1.get(0),
              k2.get(0),
              nullsafes != null ? nullsafes[0]: false);
    }
  }

  @SuppressWarnings("rawtypes")
  private int compareKeysMany(WritableComparator[] comparators,
      final List<Object> k1,
      final List<Object> k2) {
    // invariant: k1.size == k2.size
    int ret = 0;
    final int size = k1.size();
    for (int i = 0; i < size; i++) {
      // First non-equal column decides the ordering.
      ret = compareKey(comparators, i,
          k1.get(i),
          k2.get(i),
          nullsafes != null ? nullsafes[i] : false);
      if (ret != 0) {
        return ret;
      }
    }
    return ret;
  }

  @SuppressWarnings("rawtypes")
  private int compareKey(final WritableComparator comparators[], final int pos,
      final Object key_1,
      final Object key_2,
      final boolean nullsafe) {

    if (comparators[pos] == null) {
      // Comparator type depends on the first observed key value; cached afterwards.
      comparators[pos] = WritableComparatorFactory.get(key_1, nullsafe, nullOrdering);
    }
    return comparators[pos].compare(key_1, key_2);
  }

  /**
   * Computes the standard-object join key for a row; falls back to copying the shuffle KEY
   * struct field when no key expressions are configured for the alias.
   */
  @SuppressWarnings("unchecked")
  private List<Object> mergeJoinComputeKeys(Object row, Byte alias) throws HiveException {
    if ((joinKeysObjectInspectors != null) && (joinKeysObjectInspectors[alias] != null)) {
      return JoinUtil.computeKeys(row, joinKeys[alias], joinKeysObjectInspectors[alias]);
    } else {
      final List<Object> key = new ArrayList<Object>(1);
      ObjectInspectorUtils.partialCopyToStandardObject(key, row,
          Utilities.ReduceField.KEY.position, 1, (StructObjectInspector) inputObjInspectors[alias],
          ObjectInspectorCopyOption.WRITABLE);
      return (List<Object>) key.get(0); // this is always 0, even if KEY.position is not
    }
  }

  @Override
  public String getName() {
    return getOperatorName();
  }

  static public String getOperatorName() {
    return "MERGEJOIN";
  }

  @Override
  public OperatorType getType() {
    return OperatorType.MERGEJOIN;
  }

  /**
   * Initializes local (map-side) work: requires at least one non-null parent and, for the
   * single-parent case, attaches dangling dummy-store operators as extra parents.
   */
  @Override
  public void initializeLocalWork(Configuration hconf) throws HiveException {
    Operator<? extends OperatorDesc> parent = null;

    for (Operator<?
extends OperatorDesc> parentOp : parentOperators) { if (parentOp != null) { parent = parentOp; break; } } if (parent == null) { throw new HiveException("No valid parents."); } if (parentOperators.size() == 1) { Map<Integer, DummyStoreOperator> dummyOps = ((TezContext) (MapredContext.get())).getDummyOpsMap(); for (Entry<Integer, DummyStoreOperator> connectOp : dummyOps.entrySet()) { if (connectOp.getValue().getChildOperators() == null || connectOp.getValue().getChildOperators().isEmpty()) { parentOperators.add(connectOp.getKey(), connectOp.getValue()); connectOp.getValue().getChildOperators().add(this); } } } super.initializeLocalWork(hconf); } public boolean isBigTableWork() { return isBigTableWork; } public void setIsBigTableWork(boolean bigTableWork) { this.isBigTableWork = bigTableWork; } public int getTagForOperator(Operator<? extends OperatorDesc> op) { return originalParents.indexOf(op); } public void cloneOriginalParentsList(List<Operator<? extends OperatorDesc>> opList) { originalParents.addAll(opList); } }
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.instrument.payment; import static org.testng.AssertJUnit.assertEquals; import static org.testng.internal.junit.ArrayAsserts.assertArrayEquals; import org.testng.annotations.Test; import org.threeten.bp.Period; import org.threeten.bp.ZonedDateTime; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.instrument.index.IndexIborMaster; import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponFixed; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponIborCompoundingSpread; import com.opengamma.analytics.financial.schedule.ScheduleCalculator; import com.opengamma.analytics.util.time.TimeCalculator; import com.opengamma.financial.convention.StubType; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.businessday.BusinessDayConventions; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.financial.convention.calendar.MondayToFridayCalendar; import com.opengamma.timeseries.DoubleTimeSeries; import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries; import com.opengamma.util.test.TestGroup; import com.opengamma.util.time.DateUtils; /** * Tests related to the building of compounded Ibor coupons. 
 */
@Test(groups = TestGroup.UNIT)
public class CouponIborCompoundingSpreadDefinitionTest {

  private static final Calendar NYC = new MondayToFridayCalendar("NYC");
  private static final IndexIborMaster MASTER_IBOR = IndexIborMaster.getInstance();
  private static final IborIndex USDLIBOR1M = MASTER_IBOR.getIndex("USDLIBOR1M");
  private static final BusinessDayConvention PREC = BusinessDayConventions.PRECEDING;

  private static final Period TENOR_3M = Period.ofMonths(3);
  private static final ZonedDateTime START_DATE = DateUtils.getUTCDate(2012, 8, 24);
  private static final double NOTIONAL = 123454321;
  private static final double SPREAD = 0.0010; // 10 bps

  // Reference coupon built from the index convention; used as the expected value in most tests.
  private static final CouponIborCompoundingSpreadDefinition CPN_FROM_INDEX_DEFINITION =
      CouponIborCompoundingSpreadDefinition.from(NOTIONAL, START_DATE, TENOR_3M, USDLIBOR1M, SPREAD, NYC);

  private static final ZonedDateTime[] ACCRUAL_END_DATES =
      ScheduleCalculator.getAdjustedDateSchedule(START_DATE, TENOR_3M, true, false, USDLIBOR1M, NYC);
  private static final int NB_SUB_PERIOD = ACCRUAL_END_DATES.length;
  private static final ZonedDateTime[] ACCRUAL_START_DATES = new ZonedDateTime[NB_SUB_PERIOD];
  private static final double[] PAYMENT_ACCRUAL_FACTORS = new double[NB_SUB_PERIOD];
  private static final double PAYMENT_ACCRUAL_FACTOR;

  static {
    // Sub-periods chain together: each accrual start is the previous accrual end.
    ACCRUAL_START_DATES[0] = START_DATE;
    for (int loopsub = 1; loopsub < NB_SUB_PERIOD; loopsub++) {
      ACCRUAL_START_DATES[loopsub] = ACCRUAL_END_DATES[loopsub - 1];
    }
    // Total payment accrual factor is the sum of the sub-period factors.
    double af = 0.0;
    for (int loopsub = 0; loopsub < NB_SUB_PERIOD; loopsub++) {
      PAYMENT_ACCRUAL_FACTORS[loopsub] = USDLIBOR1M.getDayCount().getDayCountFraction(ACCRUAL_START_DATES[loopsub],
          ACCRUAL_END_DATES[loopsub]);
      af += PAYMENT_ACCRUAL_FACTORS[loopsub];
    }
    PAYMENT_ACCRUAL_FACTOR = af;
  }

  // Fixings precede each accrual start by the index spot lag.
  private static final ZonedDateTime[] FIXING_DATES =
      ScheduleCalculator.getAdjustedDate(ACCRUAL_START_DATES, -USDLIBOR1M.getSpotLag(), NYC);
  private static final ZonedDateTime[] FIXING_PERIOD_END_DATES =
      ScheduleCalculator.getAdjustedDate(ACCRUAL_START_DATES, USDLIBOR1M, NYC);
  private static final double[] FIXING_ACCRUAL_FACTORS = new double[NB_SUB_PERIOD];

  static {
    for (int loopsub = 0; loopsub < NB_SUB_PERIOD; loopsub++) {
      FIXING_ACCRUAL_FACTORS[loopsub] = USDLIBOR1M.getDayCount().getDayCountFraction(ACCRUAL_START_DATES[loopsub],
          FIXING_PERIOD_END_DATES[loopsub]);
    }
  }

  private static final ZonedDateTime REFERENCE_DATE = DateUtils.getUTCDate(2012, 8, 17);

  private static final double[] FIXING_TIMES = TimeCalculator.getTimeBetween(REFERENCE_DATE, FIXING_DATES);
  private static final double[] FIXING_PERIOD_END_TIMES = TimeCalculator.getTimeBetween(REFERENCE_DATE, FIXING_PERIOD_END_DATES);
  private static final double[] ACCRUAL_START_TIMES = TimeCalculator.getTimeBetween(REFERENCE_DATE, ACCRUAL_START_DATES);
  private static final double[] ACCRUAL_END_TIMES = TimeCalculator.getTimeBetween(REFERENCE_DATE, ACCRUAL_END_DATES);
  private static final double PAYMENT_TIME = ACCRUAL_END_TIMES[NB_SUB_PERIOD - 1];
  private static final double[] FIXING_RATES = new double[] {0.0010, 0.0011, 0.0012, 0.0013 };
  // NOTE(review): the first TS date (2012-08-21) precedes the first fixing date, so the first
  // applied fixing in the tests below is FIXING_RATES[1] — presumably intentional.
  private static final DoubleTimeSeries<ZonedDateTime> FIXING_TS = ImmutableZonedDateTimeDoubleTimeSeries.ofUTC(
      new ZonedDateTime[] {DateUtils.getUTCDate(2012, 8, 21), DateUtils.getUTCDate(2012, 8, 22),
        DateUtils.getUTCDate(2012, 9, 20), DateUtils.getUTCDate(2012, 10, 22) }, FIXING_RATES);

  // The explicit-dates factory must agree with the index-convention factory.
  @Test
  public void from() {
    final CouponIborCompoundingSpreadDefinition cpnFromAccrualDates =
        CouponIborCompoundingSpreadDefinition.from(ACCRUAL_END_DATES[NB_SUB_PERIOD - 1], NOTIONAL, USDLIBOR1M,
            ACCRUAL_START_DATES, ACCRUAL_END_DATES, PAYMENT_ACCRUAL_FACTORS, SPREAD, NYC);
    assertEquals("CouponIborCompoundedDefinition: from", cpnFromAccrualDates, CPN_FROM_INDEX_DEFINITION);
    assertArrayEquals("CouponIborCompoundedSpreadDefinition: getter", ACCRUAL_START_DATES,
        CPN_FROM_INDEX_DEFINITION.getAccrualStartDates());
    assertArrayEquals("CouponIborCompoundedSpreadDefinition: getter", ACCRUAL_START_DATES,
        CPN_FROM_INDEX_DEFINITION.getFixingPeriodStartDates());
    assertArrayEquals("CouponIborCompoundedSpreadDefinition: getter", ACCRUAL_END_DATES,
        CPN_FROM_INDEX_DEFINITION.getAccrualEndDates());
    assertArrayEquals("CouponIborCompoundedSpreadDefinition: getter", FIXING_DATES,
        CPN_FROM_INDEX_DEFINITION.getFixingDates());
    assertArrayEquals("CouponIborCompoundedSpreadDefinition: getter", FIXING_PERIOD_END_DATES,
        CPN_FROM_INDEX_DEFINITION.getFixingPeriodEndDates());
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", SPREAD, CPN_FROM_INDEX_DEFINITION.getSpread());
    int nbSubPeriod = CPN_FROM_INDEX_DEFINITION.getAccrualStartDates().length;
    for (int loops = 0; loops < nbSubPeriod; loops++) {
      assertEquals("CouponIborCompoundedSpreadDefinition: dates - " + loops,
          CPN_FROM_INDEX_DEFINITION.getAccrualEndDates()[loops],
          ScheduleCalculator.getAdjustedDate(START_DATE, Period.ofMonths(loops + 1), USDLIBOR1M, NYC));
      assertEquals("CouponIborCompoundedSpreadDefinition: dates - " + loops,
          CPN_FROM_INDEX_DEFINITION.getFixingPeriodEndDates()[loops],
          ScheduleCalculator.getAdjustedDate(CPN_FROM_INDEX_DEFINITION.getFixingPeriodStartDates()[loops], USDLIBOR1M, NYC));
    }
  }

  // Short-start stub: dates are generated backwards from the end date.
  @Test
  public void fromShortStub() {
    final ZonedDateTime startDate = DateUtils.getUTCDate(2012, 8, 7);
    final ZonedDateTime endDate = DateUtils.getUTCDate(2012, 11, 23);
    final CouponIborCompoundingSpreadDefinition cpn = CouponIborCompoundingSpreadDefinition.from(NOTIONAL, startDate,
        endDate, USDLIBOR1M, SPREAD, StubType.SHORT_START, PREC, true, NYC);
    assertEquals("CouponIborCompoundedSpreadDefinition: from", startDate, cpn.getAccrualStartDate());
    assertEquals("CouponIborCompoundedSpreadDefinition: from", cpn.getAccrualStartDate(), cpn.getAccrualStartDates()[0]);
    int nbSubPeriod = cpn.getAccrualStartDates().length;
    for (int loops = 0; loops < nbSubPeriod; loops++) {
      assertEquals("CouponIborCompoundedSpreadDefinition: dates - " + loops,
          cpn.getAccrualEndDates()[nbSubPeriod - 1 - loops],
          ScheduleCalculator.getAdjustedDate(endDate, Period.ofMonths(-loops), PREC, NYC, false));
      assertEquals("CouponIborCompoundedSpreadDefinition: dates - " + loops,
          cpn.getFixingPeriodEndDates()[loops],
          ScheduleCalculator.getAdjustedDate(cpn.getFixingPeriodStartDates()[loops], USDLIBOR1M, NYC));
    }
  }

  @Test
  public void getter() {
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", USDLIBOR1M, CPN_FROM_INDEX_DEFINITION.getIndex());
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", START_DATE, CPN_FROM_INDEX_DEFINITION.getAccrualStartDate());
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", START_DATE, CPN_FROM_INDEX_DEFINITION.getAccrualStartDates()[0]);
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", CPN_FROM_INDEX_DEFINITION.getPaymentDate(),
        CPN_FROM_INDEX_DEFINITION.getAccrualEndDates()[CPN_FROM_INDEX_DEFINITION.getAccrualEndDates().length - 1]);
    assertEquals("CouponIborCompoundedSpreadDefinition: getter", SPREAD, CPN_FROM_INDEX_DEFINITION.getSpread());
  }

  // Converting with a reference date after the payment date must be rejected.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void wrongDate() {
    CPN_FROM_INDEX_DEFINITION.toDerivative(DateUtils.getUTCDate(2012, 8, 25));
  }

  @Test
  public void toDerivativeNoTS() {
    final CouponIborCompoundingSpread cpnConverted = CPN_FROM_INDEX_DEFINITION.toDerivative(REFERENCE_DATE);
    final CouponIborCompoundingSpread cpnExpected = new CouponIborCompoundingSpread(USDLIBOR1M.getCurrency(), PAYMENT_TIME,
        PAYMENT_ACCRUAL_FACTOR, NOTIONAL, NOTIONAL, USDLIBOR1M, PAYMENT_ACCRUAL_FACTORS, FIXING_TIMES,
        ACCRUAL_START_TIMES, FIXING_PERIOD_END_TIMES, FIXING_ACCRUAL_FACTORS, SPREAD);
    assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted);
    // With a TS but before the first fixing, the result is the same as with no TS.
    final Coupon cpnConverted2 = CPN_FROM_INDEX_DEFINITION.toDerivative(REFERENCE_DATE, FIXING_TS);
    assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted2);
  }

  // After one fixing: the first sub-period compounds into the notional, one period drops out.
  @Test
  public void toDerivativeAfter1Fixing() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 8, 28);
    final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate());
    final double accruedNotional = (1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (FIXING_RATES[1] + SPREAD)) * NOTIONAL;
    final double[] paymentAccrualFactorsLeft = new double[NB_SUB_PERIOD - 1];
    System.arraycopy(PAYMENT_ACCRUAL_FACTORS, 1, paymentAccrualFactorsLeft, 0, NB_SUB_PERIOD - 1);
    final double[] fixingTimesLeft = new double[NB_SUB_PERIOD - 1];
    System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, FIXING_DATES), 1, fixingTimesLeft, 0, NB_SUB_PERIOD - 1);
    final double[] fixingPeriodStartTimesLeft = new double[NB_SUB_PERIOD - 1];
    System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, ACCRUAL_START_DATES), 1, fixingPeriodStartTimesLeft, 0, NB_SUB_PERIOD - 1);
    final double[] fixingPeriodEndTimesLeft = new double[NB_SUB_PERIOD - 1];
    System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, FIXING_PERIOD_END_DATES), 1, fixingPeriodEndTimesLeft, 0, NB_SUB_PERIOD - 1);
    final double[] fixingPeriodAccrualFactorsLeft = new double[NB_SUB_PERIOD - 1];
    System.arraycopy(FIXING_ACCRUAL_FACTORS, 1, fixingPeriodAccrualFactorsLeft, 0, NB_SUB_PERIOD - 1);
    final Coupon cpnConverted = CPN_FROM_INDEX_DEFINITION.toDerivative(referenceDate, FIXING_TS);
    final CouponIborCompoundingSpread cpnExpected = new CouponIborCompoundingSpread(USDLIBOR1M.getCurrency(), paymentTime,
        PAYMENT_ACCRUAL_FACTOR, NOTIONAL, accruedNotional, USDLIBOR1M, paymentAccrualFactorsLeft, fixingTimesLeft,
        fixingPeriodStartTimesLeft, fixingPeriodEndTimesLeft, fixingPeriodAccrualFactorsLeft, SPREAD);
    assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted);
  }

  // After two fixings: two sub-periods compound into the notional, one period remains.
  @Test
  public void toDerivativeAfter2Fixing() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 9, 20);
    final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate());
    final double accruedNotional = (1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (FIXING_RATES[1] + SPREAD)) *
        (1.0 + PAYMENT_ACCRUAL_FACTORS[1] * (FIXING_RATES[2] + SPREAD)) * NOTIONAL;
    final double[] paymentAccrualFactorsLeft = new double[] {PAYMENT_ACCRUAL_FACTORS[2] };
    final double[] fixingTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, FIXING_DATES[2]) };
    final double[] fixingPeriodStartTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, ACCRUAL_START_DATES[2]) };
    final double[] fixingPeriodEndTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, FIXING_PERIOD_END_DATES[2]) };
    final double[] fixingPeriodAccrualFactorsLeft = new double[] {FIXING_ACCRUAL_FACTORS[2] };
    final Coupon cpnConverted = CPN_FROM_INDEX_DEFINITION.toDerivative(referenceDate, FIXING_TS);
    final CouponIborCompoundingSpread cpnExpected = new CouponIborCompoundingSpread(USDLIBOR1M.getCurrency(), paymentTime,
        PAYMENT_ACCRUAL_FACTOR, NOTIONAL, accruedNotional, USDLIBOR1M, paymentAccrualFactorsLeft, fixingTimesLeft,
        fixingPeriodStartTimesLeft, fixingPeriodEndTimesLeft, fixingPeriodAccrualFactorsLeft, SPREAD);
    assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted);
  }

  // After all fixings: the coupon collapses to a fixed coupon at the compounded rate.
  @Test
  public void toDerivativeAfterLastFixing() {
    final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 10, 25);
    final Coupon cpnConverted = CPN_FROM_INDEX_DEFINITION.toDerivative(referenceDate, FIXING_TS);
    final double rate = ((1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (FIXING_RATES[1] + SPREAD)) *
        (1.0 + PAYMENT_ACCRUAL_FACTORS[1] * (FIXING_RATES[2] + SPREAD)) *
        (1.0 + PAYMENT_ACCRUAL_FACTORS[2] * (FIXING_RATES[3] + SPREAD)) - 1.0) / PAYMENT_ACCRUAL_FACTOR;
    final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate());
    final CouponFixed cpnExpected = new CouponFixed(USDLIBOR1M.getCurrency(), paymentTime, PAYMENT_ACCRUAL_FACTOR, NOTIONAL, rate,
        ACCRUAL_START_DATES[0], ACCRUAL_END_DATES[NB_SUB_PERIOD - 1]);
assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted); } @Test public void toDerivativeAfter1FixingInitialRate() { final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 8, 28); double initialRate = 0.002; final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate()); final double accruedNotional = (1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (initialRate + SPREAD)) * NOTIONAL; final double[] paymentAccrualFactorsLeft = new double[NB_SUB_PERIOD - 1]; System.arraycopy(PAYMENT_ACCRUAL_FACTORS, 1, paymentAccrualFactorsLeft, 0, NB_SUB_PERIOD - 1); final double[] fixingTimesLeft = new double[NB_SUB_PERIOD - 1]; System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, FIXING_DATES), 1, fixingTimesLeft, 0, NB_SUB_PERIOD - 1); final double[] fixingPeriodStartTimesLeft = new double[NB_SUB_PERIOD - 1]; System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, ACCRUAL_START_DATES), 1, fixingPeriodStartTimesLeft, 0, NB_SUB_PERIOD - 1); final double[] fixingPeriodEndTimesLeft = new double[NB_SUB_PERIOD - 1]; System.arraycopy(TimeCalculator.getTimeBetween(referenceDate, FIXING_PERIOD_END_DATES), 1, fixingPeriodEndTimesLeft, 0, NB_SUB_PERIOD - 1); final double[] fixingPeriodAccrualFactorsLeft = new double[NB_SUB_PERIOD - 1]; System.arraycopy(FIXING_ACCRUAL_FACTORS, 1, fixingPeriodAccrualFactorsLeft, 0, NB_SUB_PERIOD - 1); final Coupon cpnConverted = CouponIborCompoundingSpreadDefinition.from( CPN_FROM_INDEX_DEFINITION.getCurrency(), CPN_FROM_INDEX_DEFINITION.getPaymentDate(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDate(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDate(), CPN_FROM_INDEX_DEFINITION.getPaymentYearFraction(), CPN_FROM_INDEX_DEFINITION.getNotional(), CPN_FROM_INDEX_DEFINITION.getIndex(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDates(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDates(), CPN_FROM_INDEX_DEFINITION.getPaymentAccrualFactors(), 
CPN_FROM_INDEX_DEFINITION.getFixingDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodStartDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodEndDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodAccrualFactors(), CPN_FROM_INDEX_DEFINITION.getSpread(), initialRate).toDerivative(referenceDate, FIXING_TS); final CouponIborCompoundingSpread cpnExpected = new CouponIborCompoundingSpread(USDLIBOR1M.getCurrency(), paymentTime, PAYMENT_ACCRUAL_FACTOR, NOTIONAL, accruedNotional, USDLIBOR1M, paymentAccrualFactorsLeft, fixingTimesLeft, fixingPeriodStartTimesLeft, fixingPeriodEndTimesLeft, fixingPeriodAccrualFactorsLeft, SPREAD); assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted); } @Test public void toDerivativeAfter2FixingInitialRate() { final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 9, 20); double initialRate = 0.002; final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate()); final double accruedNotional = (1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (initialRate + SPREAD)) * (1.0 + PAYMENT_ACCRUAL_FACTORS[1] * (FIXING_RATES[2] + SPREAD)) * NOTIONAL; final double[] paymentAccrualFactorsLeft = new double[] {PAYMENT_ACCRUAL_FACTORS[2] }; final double[] fixingTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, FIXING_DATES[2]) }; final double[] fixingPeriodStartTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, ACCRUAL_START_DATES[2]) }; final double[] fixingPeriodEndTimesLeft = new double[] {TimeCalculator.getTimeBetween(referenceDate, FIXING_PERIOD_END_DATES[2]) }; final double[] fixingPeriodAccrualFactorsLeft = new double[] {FIXING_ACCRUAL_FACTORS[2] }; final Coupon cpnConverted = CouponIborCompoundingSpreadDefinition.from( CPN_FROM_INDEX_DEFINITION.getCurrency(), CPN_FROM_INDEX_DEFINITION.getPaymentDate(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDate(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDate(), 
CPN_FROM_INDEX_DEFINITION.getPaymentYearFraction(), CPN_FROM_INDEX_DEFINITION.getNotional(), CPN_FROM_INDEX_DEFINITION.getIndex(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDates(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDates(), CPN_FROM_INDEX_DEFINITION.getPaymentAccrualFactors(), CPN_FROM_INDEX_DEFINITION.getFixingDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodStartDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodEndDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodAccrualFactors(), CPN_FROM_INDEX_DEFINITION.getSpread(), initialRate).toDerivative(referenceDate, FIXING_TS); final CouponIborCompoundingSpread cpnExpected = new CouponIborCompoundingSpread(USDLIBOR1M.getCurrency(), paymentTime, PAYMENT_ACCRUAL_FACTOR, NOTIONAL, accruedNotional, USDLIBOR1M, paymentAccrualFactorsLeft, fixingTimesLeft, fixingPeriodStartTimesLeft, fixingPeriodEndTimesLeft, fixingPeriodAccrualFactorsLeft, SPREAD); assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted); } @Test public void toDerivativeAfterLastFixingInitialRate() { final ZonedDateTime referenceDate = DateUtils.getUTCDate(2012, 10, 25); double initialRate = 0.002; final Coupon cpnConverted = CouponIborCompoundingSpreadDefinition.from( CPN_FROM_INDEX_DEFINITION.getCurrency(), CPN_FROM_INDEX_DEFINITION.getPaymentDate(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDate(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDate(), CPN_FROM_INDEX_DEFINITION.getPaymentYearFraction(), CPN_FROM_INDEX_DEFINITION.getNotional(), CPN_FROM_INDEX_DEFINITION.getIndex(), CPN_FROM_INDEX_DEFINITION.getAccrualStartDates(), CPN_FROM_INDEX_DEFINITION.getAccrualEndDates(), CPN_FROM_INDEX_DEFINITION.getPaymentAccrualFactors(), CPN_FROM_INDEX_DEFINITION.getFixingDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodStartDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodEndDates(), CPN_FROM_INDEX_DEFINITION.getFixingPeriodAccrualFactors(), CPN_FROM_INDEX_DEFINITION.getSpread(), initialRate).toDerivative(referenceDate, 
FIXING_TS); final double rate = ((1.0 + PAYMENT_ACCRUAL_FACTORS[0] * (initialRate + SPREAD)) * (1.0 + PAYMENT_ACCRUAL_FACTORS[1] * (FIXING_RATES[2] + SPREAD)) * (1.0 + PAYMENT_ACCRUAL_FACTORS[2] * (FIXING_RATES[3] + SPREAD)) - 1.0) / PAYMENT_ACCRUAL_FACTOR; final double paymentTime = TimeCalculator.getTimeBetween(referenceDate, CPN_FROM_INDEX_DEFINITION.getPaymentDate()); final CouponFixed cpnExpected = new CouponFixed(USDLIBOR1M.getCurrency(), paymentTime, PAYMENT_ACCRUAL_FACTOR, NOTIONAL, rate, ACCRUAL_START_DATES[0], ACCRUAL_END_DATES[NB_SUB_PERIOD - 1]); assertEquals("CouponIborCompoundedSpreadDefinition: toDerivatives", cpnExpected, cpnConverted); } }
package com.balancedpayments.core; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import com.balancedpayments.Balanced; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.HttpVersion; import org.apache.http.NameValuePair; import org.apache.http.StatusLine; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.utils.URLEncodedUtils; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.PoolingClientConnectionManager; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import com.balancedpayments.errors.APIError; import com.balancedpayments.errors.BankAccountVerificationFailure; import com.balancedpayments.errors.Declined; import com.balancedpayments.errors.DuplicateAccountEmailAddress; import com.balancedpayments.errors.HTTPError; import com.balancedpayments.errors.InsufficientFunds; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; public class Client { private static final int CONNECTION_TIMEOUT = 60 * 1000; private final String root; private final String key; private final HttpClient httpClient; public Client(String location, String key) { this.key = key; this.root = location; PoolingClientConnectionManager connMgr = new PoolingClientConnectionManager(); this.httpClient = new 
DefaultHttpClient(connMgr); this.httpClient.getParams().setParameter("http.protocol.version", HttpVersion.HTTP_1_1); this.httpClient.getParams().setParameter("http.socket.timeout", new Integer(CONNECTION_TIMEOUT)); this.httpClient.getParams().setParameter("http.connection.timeout", new Integer(CONNECTION_TIMEOUT)); this.httpClient.getParams().setParameter("http.protocol.content-charset", "UTF-8"); } public Map<String, Object> get(String path, Map<String, String> params) throws HTTPError { URI uri = buildUri(path, params); HttpGet request = new HttpGet(uri); addHeaders(request); return op(request); } public Map<String, Object> get(String path) throws HTTPError { return get(path, new HashMap<String, String>()); } public void delete(String path, Map<String, String> params) throws HTTPError { URI uri = buildUri(path, params); HttpDelete request = new HttpDelete(uri); addHeaders(request); op(request); } public void delete(String path) throws HTTPError { delete(path, new HashMap<String, String>()); } public Map<String, Object> put(String path, Object payload) throws HTTPError { URI uri = buildUri(path); HttpPut request = new HttpPut(uri); addHeaders(request); request.setEntity(new StringEntity( serialize(payload), ContentType.APPLICATION_JSON)); return op(request); } public Map<String, Object> post(String path, Object payload) throws HTTPError { URI uri = buildUri(path); HttpPost request = new HttpPost(uri); addHeaders(request); request.setEntity(new StringEntity( serialize(payload), ContentType.APPLICATION_JSON)); return op(request); } private String buildQueryString(Map<String, String> params) { ArrayList<NameValuePair> nvs = new ArrayList<NameValuePair>(params.size()); for (Map.Entry<String, String> entry : params.entrySet()) { NameValuePair nv = new BasicNameValuePair(entry.getKey(), entry.getValue()); nvs.add(nv); } String queryString = URLEncodedUtils.format(nvs, "UTF-8"); return queryString; } private URI buildUri(String path, Map<String, String> params) { 
StringBuilder sb = new StringBuilder(); sb.append(root); sb.append(path); if (params != null && params.size() > 0) { sb.append("?"); sb.append(buildQueryString(params)); } try { return new URI(sb.toString()); } catch (URISyntaxException e) { throw new RuntimeException(e); } } private URI buildUri(String path) { return buildUri(path, null); } private void addHeaders(HttpUriRequest request) { request.addHeader(new BasicHeader("User-Agent", Balanced.getInstance().getAgent() + '/' + Balanced.getInstance().getVersion())); //request.addHeader(new BasicHeader("Accept", "application/json")); request.addHeader(new BasicHeader("Content-Type", "application/json;revision=" + Balanced.getInstance().getApiRevision())); request.addHeader(new BasicHeader("Accept", "application/vnd.api+json;revision=" + Balanced.getInstance().getApiRevision())); } private Map<String, Object> op(HttpUriRequest request) throws HTTPError { if (key != null) { byte auth[]; try { auth = (key + ":").getBytes("UTF-8"); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } String authEncoding = Base64.encodeBase64String(auth);// .encodeBase64URLSafeString(auth); request.setHeader("Authorization", "Basic " + authEncoding); } HttpResponse response; try { response = httpClient.execute(request); } catch (ClientProtocolException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } String body = null; Map<String, Object> payload = null; HttpEntity entity = response.getEntity(); if (entity != null) { try { body = EntityUtils.toString(entity); } catch (IOException e) { throw new RuntimeException(e); } if (ContentType.APPLICATION_JSON.getMimeType().equals(entity.getContentType().getValue())) payload = deserialize(body); } StatusLine status = response.getStatusLine(); if (status.getStatusCode() >= 299) { if (payload != null && status.getStatusCode() != 300) error(response, body, payload); else throw new HTTPError(response, body); } return payload; } 
private String serialize(Object payload) { Gson gson = new Gson(); String json = gson.toJson(payload); return json; } private Map<String, Object> deserialize(String body) { Gson gson = new Gson(); return gson.fromJson(body, new TypeToken<Map<String, Object>>() {}.getType()); } private static void error( HttpResponse response, String body, Map<String, Object> payload) throws APIError { if (!payload.containsKey("errors")) { throw new APIError(response, body, payload); } else { Map<String, Object> entity = (Map<String, Object>) ((ArrayList) payload.get("errors")).get(0); String category_code = (String) entity.get("category_code"); // http://stackoverflow.com/questions/3434466/creating-a-factory-method-in-java-that-doesnt-rely-on-if-else if (InsufficientFunds.CODES.contains(category_code)) throw new InsufficientFunds(response, body, entity); else if (Declined.CODES.contains(category_code)) throw new Declined(response, body, entity); else if (DuplicateAccountEmailAddress.CODES.contains(category_code)) throw new DuplicateAccountEmailAddress(response, body, entity); else if (BankAccountVerificationFailure.CODES.contains(category_code)) throw new BankAccountVerificationFailure(response, body, entity); throw new APIError(response, body, entity); } } }
/**
 *
 * This file is part of the SJWidget library.
 * (c) 2005-2012 Bjorn Roche
 * Development of this library has been supported by Indaba Media (http://www.indabamusic.com)
 * and XO Audio (http://www.xoaudio.com)
 *
 * for copyright and sharing permissions, please see the COPYING.txt file which you should
 * have received with this file.
 *
 */
/*
 * Created on April 22, 2004
 *
 *
 */
package com.xowave.util;

import java.io.*;
import java.net.URI;
import java.nio.channels.*;
import java.util.Calendar;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @author bjorn
 *
 * Adds a copyTo function to java.io.File plus assorted filename utilities.
 * copyTo uses NIO, when safe, for the actual copying so it's pretty fast.
 */
public final class XFile extends File {
    private static final long serialVersionUID = 1L;

    /**
     * @param pathname the abstract pathname
     */
    public XFile(String pathname) {
        super(pathname);
    }

    /**
     * @param fl the file whose path this XFile adopts
     */
    public XFile(File fl) {
        super(fl.getPath());
    }

    /**
     * @param parent the parent path
     * @param child the child path
     */
    public XFile(String parent, String child) {
        super(parent, child);
    }

    /**
     * @param parent the parent file
     * @param child the child path
     */
    public XFile(File parent, String child) {
        super(parent, child);
    }

    /**
     * @param uri the file URI
     */
    public XFile(URI uri) {
        super(uri);
    }

    /**
     * If the given file is already an XFile, it returns the file, properly cast.
     * Otherwise, this returns a newly created XFile.
     */
    public static XFile asXFile(File file) {
        if (file instanceof XFile)
            return (XFile) file;
        return new XFile(file);
    }

    /** Returns a string representing the base name without extension or ".". */
    public String getBaseName() {
        return getBaseNameFromFileName(getName());
    }

    /**
     * Computes the base name in the same way as getBaseName().
     * @param s the string representing the filename.
     * @return the base name without the extension or dot (".") or the full name if the file does not contain a dot.
     */
    public final static String getBaseNameFromFileName(String s) {
        int index = s.lastIndexOf('.');
        if (index != -1)
            return s.substring(0, index);
        else
            return s;
    }

    /**
     * Computes the base name in the same way as getBaseName().
     * @param f the file to derive the basename from.
     * @return the base name without the extension or dot (".") or the full name if the file does not contain a dot.
     */
    public final static String getBaseName(File f) {
        return getBaseNameFromFileName(f.getName());
    }

    /**
     * The filename extension or an empty string if the filename does not contain a dot ".".
     */
    public static final String getExtension(File f) {
        return getExtensionFromFilename(f.getName());
    }

    /**
     * Returns a string representing the extension of the file name without the basename or ".".
     * If there is no extension, it returns an empty string.
     */
    public String getExtension() {
        return getExtensionFromFilename(getName());
    }

    /**
     * Computes the extension from a filename (not a full path) in the same way as getExtension.
     *
     * @param s the filename
     * @return the extension, or an empty string if the filename does not contain a dot (".").
     */
    public final static String getExtensionFromFilename(String s) {
        int index = s.lastIndexOf('.');
        if (index == -1)
            return "";
        else
            return s.substring(index + 1);
    }

    /**
     * Creates a new file with the same path and base name as this file, but renames the extension to the
     * given extension. If the given extension is "", the returned file will have no extension. The '.' part
     * of the extension is not necessary.
     */
    public XFile newFileWithExtension(String newExt) {
        if (newExt.startsWith("."))
            newExt = newExt.substring(1);
        if (newExt.equals(""))
            return new XFile(this.getParentFile(), getBaseName());
        return new XFile(this.getParentFile(), getBaseName() + "." + newExt);
    }

    public interface CopyProgressListener {
        /**
         * Called when the copy process has done some work. This function is called at
         * least twice with values 0.0 and 1.0, unless there is an exception. Fast copies
         * such as those that use NIO, may not be able to send these updates. Updates will
         * be in the same thread as the copy!
         *
         * @param complete a value between 0.0 and 1.0 indicating the amount of
         *                 data that has been copied.
         * @return true to continue the copy, false to cancel. Note that canceling
         *         may leave an incomplete file.
         */
        public boolean updateCopyProgress(float complete);
    }

    /**
     * Moves a file from its current location to target. This may involve a
     * simple super.renameTo() or a more complex copy and deletion of target.
     * In the latter case, a copyProgressListener is passed on to the copyTo()
     * function. This function either succeeds or fails with an IOException.
     * Best effort is made to minimize the impact of failure.
     */
    public void moveTo(File target, CopyProgressListener cpl) throws IOException {
        if (cpl != null && !cpl.updateCopyProgress(0))
            return; // canceled before any work was done
        if (this.renameTo(target)) {
            // phew, that was easy!
            if (cpl != null)
                cpl.updateCopyProgress(1);
            return;
        }
        // Java refused to move the file for us, so do it manually:
        copyTo(target, cpl);
        // the copy succeeded, so delete the source:
        if (!this.delete()) {
            // we failed to delete, so delete the target and throw an exception
            target.delete(); // if this fails, there's not much we can reasonably do
            throw new IOException("Can't move: source file cannot be removed.");
        }
    }

    /**
     * Copies this file to the target. Before copying,
     * existence of the target file is checked. If the target file exists,
     * an IOException is thrown. If cpl is non-null, the operation can be canceled
     * by returning false from updateCopyProgress. Note that this
     * type of canceling is not always supported. Note that the copied file will be
     * incomplete after cancellation, so it must be deleted by hand.
     *
     * @param target the location of the new file
     * @param cpl    allows a client to receive updates about the progress and also
     *               the ability to cancel. Note that updates are sent in the copy thread and,
     *               thus, care must be taken to ensure thread safety, especially with
     *               GUI items. Also, work done in the callbacks can slow down the copy progress.
     * @throws IOException if target exists, this does not exist or some problem occurs
     *                     during read or write. Best effort is made to remove any partially copied file.
     */
    public void copyTo(File target, CopyProgressListener cpl) throws IOException {
        if (target.exists())
            throw new IOException("File Exists: " + target);
        if (!this.exists())
            throw new FileNotFoundException();
        if (this.isDirectory())
            throw new IOException("Cannot copy directory: " + this);
        // NIO is bloody fast so we use it if it is safe to do so.
        final boolean useNIO = Environment.isNIOSafe();
        try {
            boolean shouldContinue = true;
            if (cpl != null)
                shouldContinue = cpl.updateCopyProgress(0);
            if (!shouldContinue)
                return;
            if (useNIO) {
                // Channels are now closed in a finally block so they are not
                // leaked when transferFrom (or channel creation) throws.
                FileChannel srcChannel = null;
                FileChannel dstChannel = null;
                try {
                    srcChannel = new FileInputStream(this).getChannel();
                    dstChannel = new FileOutputStream(target).getChannel();
                    // Copy file contents from source to destination
                    dstChannel.transferFrom(srcChannel, 0, srcChannel.size());
                } finally {
                    if (srcChannel != null) {
                        try {
                            srcChannel.close();
                        } catch (IOException ioe) {
                            System.out.println("WARNING: Could not close file after copy(1).");
                        }
                    }
                    if (dstChannel != null) {
                        try {
                            dstChannel.close();
                        } catch (IOException ioe) {
                            System.out.println("WARNING: Could not close file after copy(2).");
                        }
                    }
                }
            } else {
                // Streams are closed in a finally block so they are not leaked
                // on exception or cancellation.
                BufferedInputStream bis = null;
                BufferedOutputStream bos = null;
                try {
                    bis = new BufferedInputStream(new FileInputStream(this));
                    bos = new BufferedOutputStream(new FileOutputStream(target));
                    long totalSize = this.length();
                    int ch;
                    // copy one byte at a time until end of file or the user cancels
                    if (cpl == null) {
                        while ((ch = bis.read()) != -1)
                            bos.write(ch);
                    } else {
                        long count = 0;
                        while ((ch = bis.read()) != -1) {
                            bos.write(ch);
                            if (count % 1024 == 0) {
                                shouldContinue = cpl.updateCopyProgress(((float) count) / ((float) totalSize));
                                if (!shouldContinue)
                                    return; // finally below still closes both streams
                            }
                            ++count;
                        }
                    }
                    bos.flush();
                } finally {
                    if (bis != null) {
                        try {
                            bis.close();
                        } catch (IOException ioe) {
                            System.out.println("WARNING: Could not close file after copy(1).");
                        }
                    }
                    if (bos != null) {
                        try {
                            bos.close();
                        } catch (IOException ioe) {
                            System.out.println("WARNING: Could not close file after copy(2).");
                        }
                    }
                }
            }
            if (cpl != null)
                cpl.updateCopyProgress(1);
        } catch (IOException ioe) {
            // best effort: remove the partial copy before propagating
            target.delete();
            throw ioe;
        }
    }

    /**
     * Matches characters that should not be used in a file name:
     * everything except letters, digits, spaces and dashes.
     * This is not guaranteed to be safe, but should be alright on most OSes.
     * (The previous pattern, "[^a-zA-Z0-9]&amp;&amp;^ &amp;&amp;^-", was a broken attempt at
     * character-class intersection and effectively never matched anything.)
     */
    public static final Pattern unsafeChars = Pattern.compile("[^a-zA-Z0-9 -]");

    /**
     * Creates a unique folder given the baseName, base path and the current date.
     * Appends a number if necessary.
     *
     * Note that the baseName will have all characters matching {@link #unsafeChars} removed.
     *
     * Note that mkdir() must be called on the created object for the
     * file to exist.
     */
    public static XFile getUniqueDatedFolder(File directory, String baseName) {
        Calendar cal = Calendar.getInstance();
        String initialName = (baseName == null || baseName.equals("")) ? "" : baseName + " ";
        initialName += cal.get(Calendar.YEAR) + "-";
        // Calendar.MONTH is zero-based, hence the +1
        initialName += twoDigits(cal.get(Calendar.MONTH) + 1) + "-";
        initialName += twoDigits(cal.get(Calendar.DAY_OF_MONTH));
        initialName = unsafeChars.matcher(initialName).replaceAll("");
        int count = 2;
        String finalName = initialName;
        XFile mf;
        while ((mf = new XFile(directory, finalName)).exists()) {
            finalName = initialName + "-" + threeDigits(count);
            ++count;
        }
        return mf;
    }

    /** Left-pads a numeric string to at least two digits. */
    private static String twoDigits(String s) {
        if (s.length() == 0)
            return "00";
        else if (s.length() == 1)
            return "0" + s;
        else
            return s;
    }

    private static String twoDigits(int i) {
        return twoDigits(i + "");
    }

    /** Left-pads a numeric string to at least three digits. */
    private static String threeDigits(String s) {
        if (s.length() == 0)
            return "000";
        else if (s.length() == 1)
            return "00" + s;
        else if (s.length() == 2)
            return "0" + s;
        else
            return s;
    }

    private static String threeDigits(int i) {
        return threeDigits(i + "");
    }

    /**
     * Replaces characters that should not be in a file name
     * (back/forward slashes, pipes, colons, semicolons, '?', '&lt;', '&gt;' and '*')
     * with dashes. This should be fine for any sane OS.
     * (The previous regex was a malformed alternation with stray quantifiers
     * and did not match the documented character set.)
     */
    public static String getSafeFileName(String fileNameCandidate) {
        return fileNameCandidate.replaceAll("[\\\\/|:;?<>*]", "-");
    }

    /** Like {@link #getSafeFileName(String)}, but leaves '*' characters untouched. */
    public static String getSafeFileNameLeavingStars(String fileNameCandidate) {
        return fileNameCandidate.replaceAll("[\\\\/|:;?<>]", "-");
    }

    /**
     * Recursively deletes this file and sub-files.
     * On failure, stops immediately and returns false.
     */
    public boolean deleteTree() {
        return deleteTree(this);
    }

    private static boolean deleteTree(File f) {
        if (f.isDirectory()) {
            File[] fls = f.listFiles();
            if (fls == null)
                return false; // could not list (I/O error or permissions) — previously an NPE
            for (int i = 0; i < fls.length; ++i)
                if (!deleteTree(fls[i]))
                    return false;
        }
        return f.delete();
    }

    /**
     * If the file exists and is a directory, returns true. If it does not exist,
     * it tries to create it and returns true if successful. In all other cases,
     * it returns false.
     */
    public boolean guaranteeDir() {
        if (exists() && isDirectory())
            return true;
        if (exists())
            return false;
        return mkdir();
    }

    /**
     * Returns a new file whose base name carries an incremented "-N" suffix:
     * "foo.txt" becomes "foo-2.txt", "foo-2.txt" becomes "foo-3.txt", etc.
     */
    public XFile incrementFilename() {
        String base = getBaseName();
        String ext = getExtension();
        Pattern pattern = Pattern.compile("-[\\d]+$");
        Matcher matcher = pattern.matcher(base);
        if (matcher.find()) {
            int index = Integer.parseInt(base.substring(matcher.start() + 1));
            ++index;
            base = base.substring(0, matcher.start()) + "-" + index;
        } else {
            base = base + "-2";
        }
        String filename = base;
        // only re-append an extension if the original name had one
        if (getName().indexOf('.') != -1)
            filename += "." + ext;
        return new XFile(getParentFile(), filename);
    }

    @Override
    public XFile getParentFile() {
        File f = super.getParentFile();
        if (f == null)
            return null;
        return new XFile(f);
    }

    @Override
    public XFile getAbsoluteFile() {
        return new XFile(super.getAbsoluteFile());
    }

    @Override
    public XFile getCanonicalFile() throws IOException {
        return new XFile(super.getCanonicalFile());
    }

    /**
     * Like {@link File#listFiles()} but returns XFiles.
     * Returns null when the directory cannot be listed, mirroring listFiles()
     * (previously this threw a NullPointerException).
     */
    public XFile[] listXFiles() {
        File[] fs = listFiles();
        if (fs == null)
            return null;
        XFile[] ret = new XFile[fs.length];
        for (int i = 0; i < fs.length; ++i)
            ret[i] = new XFile(fs[i]);
        return ret;
    }

    /** Like {@link File#listFiles(FilenameFilter)} but returns XFiles (null on listing failure). */
    public XFile[] listXFiles(FilenameFilter ff) {
        File[] fs = listFiles(ff);
        if (fs == null)
            return null;
        XFile[] ret = new XFile[fs.length];
        for (int i = 0; i < fs.length; ++i)
            ret[i] = new XFile(fs[i]);
        return ret;
    }

    /** Like {@link File#listFiles(FileFilter)} but returns XFiles (null on listing failure). */
    public XFile[] listXFiles(FileFilter ff) {
        File[] fs = listFiles(ff);
        if (fs == null)
            return null;
        XFile[] ret = new XFile[fs.length];
        for (int i = 0; i < fs.length; ++i)
            ret[i] = new XFile(fs[i]);
        return ret;
    }

    @Override
    public boolean equals(Object f) {
        return super.equals(f);
    }

    @Override
    public int hashCode() {
        return super.hashCode();
    }
}
/* * This file is part of SpongeAPI, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.spongepowered.api.world;

import net.kyori.adventure.audience.Audience;
import net.kyori.adventure.audience.ForwardingAudience;
import org.spongepowered.api.Server;
import org.spongepowered.api.effect.Viewer;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.registry.RegistryHolder;
import org.spongepowered.api.service.context.ContextSource;
import org.spongepowered.api.util.annotation.DoNotStore;
import org.spongepowered.api.world.chunk.WorldChunk;
import org.spongepowered.api.world.storage.WorldProperties;
import org.spongepowered.api.world.volume.archetype.ArchetypeVolumeCreator;
import org.spongepowered.api.world.volume.block.PhysicsAwareMutableBlockVolume;
import org.spongepowered.api.world.weather.WeatherUniverse;
import org.spongepowered.math.vector.Vector3d;
import org.spongepowered.math.vector.Vector3i;

import java.util.Collection;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Predicate;

/**
 * A loaded Minecraft world.
 *
 * <p>Marked {@link DoNotStore}: an instance may become stale once the world is
 * unloaded (see {@link #isLoaded()}), so callers should re-query rather than
 * cache a reference.</p>
 */
@DoNotStore
public interface World<W extends World<W, L>, L extends Location<W, L>> extends ForwardingAudience, WorldLike<W>, LocationCreator<W, L>,
    PhysicsAwareMutableBlockVolume<W>, ContextSource, Viewer, ArchetypeVolumeCreator, WeatherUniverse, RegistryHolder {

    /**
     * Gets the {@link WorldProperties properties} of this world.
     *
     * @return The properties
     */
    WorldProperties properties();

    @Override
    @SuppressWarnings("unchecked")
    default W world() {
        // This interface is self-referentially typed, so the cast is safe by contract.
        return (W) this;
    }

    /**
     * Gets whether this world is currently loaded.
     *
     * <p>An assumption can be made that if this returns false, this is
     * considered a stale object.</p>
     *
     * @return True if loaded, false if not
     */
    boolean isLoaded();

    /**
     * Gets an unmodifiable collection of {@link Player players} currently in
     * this world.
     *
     * @return The players
     */
    @Override
    Collection<? extends Player> players();

    @Override
    default Iterable<? extends Audience> audiences() {
        // The world's audience is exactly its current player set.
        return this.players();
    }

    /**
     * Finds the player closest to the given position within {@code distance}.
     *
     * @param position The position to search around
     * @param distance The maximum search distance
     * @return The closest player, if any
     */
    default Optional<? extends Player> closestPlayer(final Vector3i position, final double distance) {
        // requireNonNull returns its argument, so the check and the read are fused.
        final Vector3i pos = Objects.requireNonNull(position, "position");
        return this.closestPlayer(pos.x(), pos.y(), pos.z(), distance, p -> true);
    }

    /**
     * Finds the closest player matching {@code predicate} around the given
     * position within {@code distance}.
     *
     * @param position The position to search around
     * @param distance The maximum search distance
     * @param predicate The filter players must match
     * @return The closest matching player, if any
     */
    default Optional<? extends Player> closestPlayer(final Vector3i position, final double distance, final Predicate<? super Player> predicate) {
        final Vector3i pos = Objects.requireNonNull(position, "position");
        Objects.requireNonNull(predicate, "predicate");
        return this.closestPlayer(pos.x(), pos.y(), pos.z(), distance, predicate);
    }

    /**
     * Finds the player closest to the given entity within {@code distance}.
     *
     * @param entity The entity whose position is searched around
     * @param distance The maximum search distance
     * @return The closest player, if any
     */
    default Optional<? extends Player> closestPlayer(final Entity entity, final double distance) {
        final Vector3d at = Objects.requireNonNull(entity, "entity").location().position();
        return this.closestPlayer(at.floorX(), at.floorY(), at.floorZ(), distance, p -> true);
    }

    /**
     * Finds the closest player matching {@code predicate} around the given
     * entity within {@code distance}.
     *
     * @param entity The entity whose position is searched around
     * @param distance The maximum search distance
     * @param predicate The filter players must match
     * @return The closest matching player, if any
     */
    default Optional<? extends Player> closestPlayer(final Entity entity, final double distance, final Predicate<? super Player> predicate) {
        Objects.requireNonNull(entity, "entity");
        Objects.requireNonNull(predicate, "predicate");
        final Vector3d at = entity.location().position();
        return this.closestPlayer(at.floorX(), at.floorY(), at.floorZ(), distance, predicate);
    }

    /**
     * Finds the player closest to the given block coordinates within
     * {@code distance}.
     *
     * @param x The x coordinate
     * @param y The y coordinate
     * @param z The z coordinate
     * @param distance The maximum search distance
     * @return The closest player, if any
     */
    default Optional<? extends Player> closestPlayer(final int x, final int y, final int z, final double distance) {
        return this.closestPlayer(x, y, z, distance, p -> true);
    }

    /**
     * Finds the closest player matching {@code predicate} around the given
     * block coordinates within {@code distance}.
     *
     * @param x The x coordinate
     * @param y The y coordinate
     * @param z The z coordinate
     * @param distance The maximum search distance
     * @param predicate The filter players must match
     * @return The closest matching player, if any
     */
    Optional<? extends Player> closestPlayer(int x, int y, int z, double distance, Predicate<? super Player> predicate);

    /**
     * {@inheritDoc}
     * This gets a guaranteed {@link WorldChunk} at the desired block position; however,
     * the {@link WorldChunk} instance may be {@link WorldChunk#isEmpty() empty}, and
     * likewise, may not be generated, valid, pre-existing. It is important to
     * check for these cases prior to attempting to modify the chunk.
     *
     * <p>Note that this is still different from {@link #chunk(Vector3i)}
     * due to it being a relative block position which can vary depending on
     * implementation and other mods installed.</p>
     *
     * @param blockPosition The block position to be transformed for relative chunk position
     * @return The available chunk at that position
     */
    @Override
    WorldChunk chunkAtBlock(final Vector3i blockPosition);

    /**
     * {@inheritDoc}
     * This gets a guaranteed {@link WorldChunk} at the desired block position; however,
     * the {@link WorldChunk} instance may be {@link WorldChunk#isEmpty() empty}, and
     * likewise, may not be generated, valid, pre-existing. It is important to
     * check for these cases prior to attempting to modify the chunk.
     *
     * <p>Note that this is still different from {@link #chunk(Vector3i)}
     * due to the relative block position dictated by {@link Server#chunkLayout()},
     * which can vary depending on implementation and other mods installed.</p>
     *
     * @param bx The block x coordinate
     * @param by The block y coordinate
     * @param bz The block z coordinate
     * @return The available chunk at that position
     */
    @Override
    WorldChunk chunkAtBlock(int bx, int by, int bz);

    /**
     * {@inheritDoc}
     * This gets a guaranteed {@link WorldChunk} at the desired chunk position; however,
     * the {@link WorldChunk} instance may be {@link WorldChunk#isEmpty() empty}, and
     * likewise, may not be generated, valid, pre-existing. It is important to
     * check for these cases prior to attempting to modify the chunk.
     *
     * @param chunkPos The chunk position relative to the {@link Server#chunkLayout() chunk layout}
     * @return The available chunk at that position
     */
    @Override
    default WorldChunk chunk(final Vector3i chunkPos) {
        final Vector3i pos = Objects.requireNonNull(chunkPos, "chunkPos");
        return this.chunk(pos.x(), pos.y(), pos.z());
    }

    /**
     * {@inheritDoc}
     * This gets a guaranteed {@link WorldChunk} at the desired chunk position; however,
     * the {@link WorldChunk} instance may be {@link WorldChunk#isEmpty() empty}, and
     * likewise, may not be generated, valid, pre-existing. It is important to
     * check for these cases prior to attempting to modify the chunk.
     *
     * @param cx The x chunk coordinate
     * @param cy The y coordinate
     * @param cz The z chunk coordinate
     * @return The available chunk at the chunk position
     */
    @Override
    WorldChunk chunk(int cx, int cy, int cz);

    /**
     * Gets the chunk at the given chunk coordinate position if it exists or if
     * {@code shouldGenerate} is true and the chunk is generated.
     *
     * @param chunkPosition The position
     * @param shouldGenerate True to generate a new chunk
     * @return The loaded or generated chunk, if already generated
     */
    default Optional<WorldChunk> loadChunk(final Vector3i chunkPosition, final boolean shouldGenerate) {
        final Vector3i pos = Objects.requireNonNull(chunkPosition, "chunkPosition");
        return this.loadChunk(pos.x(), pos.y(), pos.z(), shouldGenerate);
    }

    /**
     * Gets the chunk at the given chunk coordinate position if it exists or if
     * {@code shouldGenerate} is true and the chunk is generated.
     *
     * <p>In Vanilla, the y coordinate will always be 0.</p>
     *
     * @param cx The x coordinate
     * @param cy The y coordinate
     * @param cz The z coordinate
     * @param shouldGenerate True to generate a new chunk
     * @return The loaded or generated chunk, if already generated
     */
    Optional<WorldChunk> loadChunk(int cx, int cy, int cz, boolean shouldGenerate);

    /**
     * Returns a Collection of all actively loaded chunks in this world.
     *
     * <p>The ordering of the returned chunks is undefined.</p>
     *
     * @return The loaded chunks
     */
    Iterable<WorldChunk> loadedChunks();
}
package jat.core.plot.plot.plots;

import jat.core.plot.plot.FrameView;
import jat.core.plot.plot.Plot3DPanel;
import jat.core.plot.plot.render.AbstractDrawer;

import java.awt.Color;

/**
 * A 3D histogram plot: every sample {@code (x, y, z)} in {@code XY} is drawn as
 * a rectangular bar from the z=0 plane up to height {@code z}, centered on
 * {@code (x, y)}.
 *
 * <p>Bar footprints come either from a single constant width pair
 * ({@code width_constant}, used when its first component is positive) or from a
 * per-bar {@code widths} array. The eight corner arrays are precomputed by
 * {@link #build()} and consumed by {@link #plot(AbstractDrawer, Color)}.</p>
 */
public class HistogramPlot3D extends Plot {

    // Precomputed bar corners: top/bottom x NW/NE/SW/SE, one row per sample.
    double[][] topNW;
    double[][] topNE;
    double[][] topSW;
    double[][] topSE;
    double[][] bottomNW;
    double[][] bottomNE;
    double[][] bottomSW;
    double[][] bottomSE;

    // Per-bar (x, y) widths; only consulted when width_constant[0] <= 0.
    double[][] widths;
    // Constant (x, y) width for all bars; {-1, -1} means "use per-bar widths".
    double[] width_constant = { -1, -1 };

    // Samples as rows of (x, y, height).
    double[][] XY;

    boolean fill_shape = true;

    /**
     * Builds a histogram with per-bar widths.
     *
     * @param n plot name
     * @param c plot color
     * @param _XY samples, rows of (x, y, height)
     * @param w per-bar (x, y) widths, one row per sample
     */
    public HistogramPlot3D(String n, Color c, double[][] _XY, double[][] w) {
        super(n, c);
        XY = _XY;
        widths = w;
        build();
    }

    /**
     * Builds a histogram where every bar has the same footprint.
     *
     * @param n plot name
     * @param c plot color
     * @param _XY samples, rows of (x, y, height)
     * @param wX constant bar width along x
     * @param wY constant bar width along y
     */
    public HistogramPlot3D(String n, Color c, double[][] _XY, double wX, double wY) {
        super(n, c);
        XY = _XY;
        width_constant = new double[] { wX, wY };
        build();
    }

    /**
     * Builds a histogram where every bar has the same footprint.
     *
     * @param n plot name
     * @param c plot color
     * @param _XY samples, rows of (x, y, height)
     * @param w constant (x, y) width pair
     */
    public HistogramPlot3D(String n, Color c, double[][] _XY, double[] w) {
        super(n, c);
        XY = _XY;
        width_constant = w;
        build();
    }

    /**
     * Recomputes the eight corner arrays from {@code XY} and the active width
     * source. (The two previously duplicated branches are folded into one loop
     * that only differs in where the half-widths come from.)
     */
    private void build() {
        final int n = XY.length;
        topNW = new double[n][];
        topNE = new double[n][];
        topSW = new double[n][];
        topSE = new double[n][];
        bottomNW = new double[n][];
        bottomNE = new double[n][];
        bottomSW = new double[n][];
        bottomSE = new double[n][];
        final boolean constant = width_constant[0] > 0;
        for (int i = 0; i < n; i++) {
            final double hx = (constant ? width_constant[0] : widths[i][0]) / 2;
            final double hy = (constant ? width_constant[1] : widths[i][1]) / 2;
            final double x = XY[i][0];
            final double y = XY[i][1];
            final double z = XY[i][2];
            topNW[i] = new double[] { x - hx, y + hy, z };
            topNE[i] = new double[] { x + hx, y + hy, z };
            topSW[i] = new double[] { x - hx, y - hy, z };
            topSE[i] = new double[] { x + hx, y - hy, z };
            bottomNW[i] = new double[] { x - hx, y + hy, 0 };
            bottomNE[i] = new double[] { x + hx, y + hy, 0 };
            bottomSW[i] = new double[] { x - hx, y - hy, 0 };
            bottomSE[i] = new double[] { x + hx, y - hy, 0 };
        }
    }

    /**
     * Rebuilds the corner geometry if the current data/width combination is
     * consistent; with mismatched lengths the stale geometry is kept rather
     * than risking an out-of-bounds access during {@link #build()}.
     */
    private void rebuildIfConsistent() {
        if (width_constant[0] > 0 || (widths != null && widths.length >= XY.length)) {
            build();
        }
    }

    public void plot(AbstractDrawer draw, Color c) {
        // Guard: with no samples there is nothing to draw (and bottomSW[0]
        // below would throw).
        if (!visible || XY.length == 0)
            return;

        draw.canvas.includeInBounds(bottomSW[0]);
        draw.canvas.includeInBounds(topNE[XY.length - 1]);

        draw.setColor(c);
        draw.setLineType(AbstractDrawer.CONTINOUS_LINE);
        for (int i = 0; i < XY.length; i++) {
            // Skip zero-height bars entirely.
            if (topNW[i][2] != bottomNW[i][2]) {
                // Top and bottom rectangles...
                draw.drawLine(topNW[i], topNE[i]);
                draw.drawLine(topNE[i], topSE[i]);
                draw.drawLine(topSE[i], topSW[i]);
                draw.drawLine(topSW[i], topNW[i]);
                draw.drawLine(bottomNW[i], bottomNE[i]);
                draw.drawLine(bottomNE[i], bottomSE[i]);
                draw.drawLine(bottomSE[i], bottomSW[i]);
                draw.drawLine(bottomSW[i], bottomNW[i]);
                // ...and the four vertical edges.
                draw.drawLine(bottomNW[i], topNW[i]);
                draw.drawLine(bottomNE[i], topNE[i]);
                draw.drawLine(bottomSE[i], topSE[i]);
                draw.drawLine(bottomSW[i], topSW[i]);
                if (fill_shape) {
                    draw.fillPolygon(0.2f, topNW[i], topNE[i], topSE[i], topSW[i]);
                    //draw.fillPolygon(bottomNW[i], bottomNE[i], bottomSE[i], bottomSW[i]);
                    /*draw.fillPolygon(topNW[i], topNE[i], bottomNE[i], bottomNW[i]);
                    draw.fillPolygon(topSW[i], topSE[i], bottomSE[i], bottomSW[i]);
                    draw.fillPolygon(topNE[i], topSE[i], bottomSE[i], bottomNE[i]);
                    draw.fillPolygon(topNW[i], topSW[i], bottomSW[i], bottomNW[i]);*/
                }
            }
        }
    }

    @Override
    public void setData(double[][] d) {
        XY = d;
        // BUGFIX: previously the corner arrays were left stale, so plot() kept
        // drawing bars for the old data.
        rebuildIfConsistent();
    }

    @Override
    public double[][] getData() {
        return XY;
    }

    public void setDataWidth(double[][] w) {
        widths = w;
        // BUGFIX: make the new per-bar widths actually take effect. Without
        // clearing width_constant, a histogram constructed with constant
        // widths would silently ignore w; without rebuilding, the geometry
        // stayed stale.
        width_constant = new double[] { -1, -1 };
        rebuildIfConsistent();
    }

    public void setDataWidth(double... w) {
        width_constant = w;
        build();
    }

    public double[][] getDataWidth() {
        // When a constant width is active, materialize it as a per-bar array.
        if (width_constant[0] > 0) {
            widths = new double[XY.length][2];
            for (int i = 0; i < widths.length; i++) {
                widths[i][0] = width_constant[0];
                widths[i][1] = width_constant[1];
            }
        }
        return widths;
    }

    public void setData(double[][] d, double[][] w) {
        XY = d;
        widths = w;
        // BUGFIX: per-bar widths must win over a previously set constant, and
        // the geometry must follow the new data.
        width_constant = new double[] { -1, -1 };
        rebuildIfConsistent();
    }

    public void setData(double[][] d, double... w) {
        XY = d;
        setDataWidth(w); // rebuilds with the new constant widths
    }

    /**
     * Hit test: returns the sample whose projection is within
     * {@code note_precision} pixels of the probe, or null.
     */
    public double[] isSelected(int[] screenCoordTest, AbstractDrawer draw) {
        for (int i = 0; i < XY.length; i++) {
            int[] screenCoord = draw.project(XY[i]);

            if ((screenCoord[0] + note_precision > screenCoordTest[0]) && (screenCoord[0] - note_precision < screenCoordTest[0])
                    && (screenCoord[1] + note_precision > screenCoordTest[1]) && (screenCoord[1] - note_precision < screenCoordTest[1]))
                return XY[i];
        }
        return null;
    }

    public static void main(String[] args) {
        double[][] XY = new double[500][2];
        for (int i = 0; i < XY.length; i++) {
            XY[i][0] = Math.random() + Math.random();
            XY[i][1] = Math.random() + Math.random();
        }
        Plot3DPanel p = new Plot3DPanel("SOUTH");
        p.addHistogramPlot("test", XY, 4, 6);
        new FrameView(p);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import org.locationtech.spatial4j.shape.SpatialRelation;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for {@link RangeQueryBuilder}: random round-trip building, Lucene
 * query translation for string/int/date (and range) fields, JSON parsing,
 * and rewrite-to-match-all/none/same behavior.
 */
public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {

    @Override
    protected RangeQueryBuilder doCreateTestQueryBuilder() {
        RangeQueryBuilder query;
        // switch between numeric and date ranges
        switch (randomIntBetween(0, 2)) {
            case 0:
                // use mapped integer field for numeric range queries
                query = new RangeQueryBuilder(randomBoolean() ? INT_FIELD_NAME : INT_RANGE_FIELD_NAME);
                query.from(randomIntBetween(1, 100));
                query.to(randomIntBetween(101, 200));
                break;
            case 1:
                // use mapped date field, using date string representation
                query = new RangeQueryBuilder(randomBoolean() ? DATE_FIELD_NAME : DATE_RANGE_FIELD_NAME);
                query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                // Create timestamp option only when we have a date mapper,
                // otherwise we could trigger exception.
                if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
                    if (randomBoolean()) {
                        query.timeZone(randomDateTimeZone().getID());
                    }
                    if (randomBoolean()) {
                        query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
                    }
                }
                if (query.fieldName().equals(DATE_RANGE_FIELD_NAME)) {
                    query.relation(RandomPicks.randomFrom(random(), ShapeRelation.values()).getRelationName());
                }
                break;
            case 2:
            default:
                query = new RangeQueryBuilder(STRING_FIELD_NAME);
                query.from("a" + randomAsciiOfLengthBetween(1, 10));
                query.to("z" + randomAsciiOfLengthBetween(1, 10));
                break;
        }
        query.includeLower(randomBoolean()).includeUpper(randomBoolean());
        if (randomBoolean()) {
            query.from(null);
        }
        if (randomBoolean()) {
            query.to(null);
        }
        return query;
    }

    @Override
    protected Map<String, RangeQueryBuilder> getAlternateVersions() {
        Map<String, RangeQueryBuilder> alternateVersions = new HashMap<>();
        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME);
        rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200));
        rangeQueryBuilder.includeLower(randomBoolean());
        rangeQueryBuilder.includeUpper(randomBoolean());
        // The short gt/gte/lt/lte form must parse to the same builder.
        String query =
                "{\n" +
                "    \"range\":{\n" +
                "        \"" + INT_FIELD_NAME + "\": {\n" +
                "            \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" +
                "            \"" + (rangeQueryBuilder.includeUpper() ? "lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" +
                "        }\n" +
                "    }\n" +
                "}";
        alternateVersions.put(query, rangeQueryBuilder);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
        if (getCurrentTypes().length == 0
                || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false
                        && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false
                        && queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) == false
                        && queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME) == false)) {
            // Unmapped fields fall back to a term range query.
            assertThat(query, instanceOf(TermRangeQuery.class));
            TermRangeQuery termRangeQuery = (TermRangeQuery) query;
            assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
            assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
            assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
            assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
            assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
        } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
            assertThat(query, instanceOf(PointRangeQuery.class));
            MapperService mapperService = context.getQueryShardContext().getMapperService();
            MappedFieldType mappedFieldType = mapperService.fullName(DATE_FIELD_NAME);
            final Long fromInMillis;
            final Long toInMillis;
            // we have to normalize the incoming value into milliseconds since it could be literally anything
            if (mappedFieldType instanceof DateFieldMapper.DateFieldType) {
                fromInMillis = queryBuilder.from() == null ? null :
                    ((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.from(),
                        queryBuilder.includeLower(),
                        queryBuilder.getDateTimeZone(),
                        queryBuilder.getForceDateParser(), context.getQueryShardContext());
                toInMillis = queryBuilder.to() == null ? null :
                    ((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.to(),
                        queryBuilder.includeUpper(),
                        queryBuilder.getDateTimeZone(),
                        queryBuilder.getForceDateParser(), context.getQueryShardContext());
            } else {
                fromInMillis = toInMillis = null;
                fail("unexpected mapped field type: [" + mappedFieldType.getClass() + "] " + mappedFieldType.toString());
            }

            Long min = fromInMillis;
            Long max = toInMillis;
            long minLong, maxLong;
            if (min == null) {
                minLong = Long.MIN_VALUE;
            } else {
                minLong = min.longValue();
                // exclusive lower bound -> shift by one, guarding against overflow
                if (queryBuilder.includeLower() == false && minLong != Long.MAX_VALUE) {
                    minLong++;
                }
            }
            if (max == null) {
                maxLong = Long.MAX_VALUE;
            } else {
                maxLong = max.longValue();
                if (queryBuilder.includeUpper() == false && maxLong != Long.MIN_VALUE) {
                    maxLong--;
                }
            }
            assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, minLong, maxLong), query);
        } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
            assertThat(query, instanceOf(PointRangeQuery.class));
            Integer min = (Integer) queryBuilder.from();
            Integer max = (Integer) queryBuilder.to();
            int minInt, maxInt;
            if (min == null) {
                minInt = Integer.MIN_VALUE;
            } else {
                minInt = min.intValue();
                if (queryBuilder.includeLower() == false && minInt != Integer.MAX_VALUE) {
                    minInt++;
                }
            }
            if (max == null) {
                maxInt = Integer.MAX_VALUE;
            } else {
                maxInt = max.intValue();
                if (queryBuilder.includeUpper() == false && maxInt != Integer.MIN_VALUE) {
                    maxInt--;
                }
            }
            // BUGFIX: the computed bounds were previously never checked against
            // the actual query (dead computation); assert them like the date
            // branch does.
            assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, minInt, maxInt), query);
        } else if (queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) || queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME)) {
            // todo can't check RangeFieldQuery because its currently package private (this will change)
        } else {
            throw new UnsupportedOperationException();
        }
    }

    public void testIllegalArguments() {
        expectThrows(IllegalArgumentException.class, () -> new RangeQueryBuilder((String) null));
        expectThrows(IllegalArgumentException.class, () -> new RangeQueryBuilder(""));

        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("test");
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone("badID"));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format("badFormat"));
    }

    /**
     * Specifying a timezone together with a numeric range query should throw an exception.
     */
    public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME);
        query.from(1).to(10).timeZone("UTC");
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    /**
     * Specifying a timezone together with an unmapped field should throw an exception.
     */
    public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder("bogus_field");
        query.from(1).to(10).timeZone("UTC");
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    public void testToQueryNumericField() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
        // since age is automatically registered in data, we encode it as numeric
        assertThat(parsedQuery, instanceOf(PointRangeQuery.class));
        assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery);
    }

    public void testDateRangeQueryFormat() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // We test 01/01/2012 from gte and 2030 for lt
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"01/01/2012\",\n" +
                "            \"lt\": \"2030\",\n" +
                "            \"format\": \"dd/MM/yyyy||yyyy\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext());
        assertThat(parsedQuery, instanceOf(PointRangeQuery.class));

        assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(),
                DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1),
                parsedQuery);

        // Test Invalid format
        final String invalidQuery = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"01/01/2012\",\n" +
                "            \"lt\": \"2030\",\n" +
                "            \"format\": \"yyyy\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        expectThrows(ElasticsearchParseException.class, () -> parseQuery(invalidQuery).toQuery(createShardContext()));
    }

    public void testDateRangeBoundaries() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // Inclusive bounds with date-math rounding expand to the full rounded interval.
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"2014-11-05||/M\",\n" +
                "            \"lte\": \"2014-12-08||/d\"\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext());
        assertThat(parsedQuery, instanceOf(PointRangeQuery.class));
        assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(),
                DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()),
                parsedQuery);

        // Exclusive bounds with rounding shift past the rounded interval edges.
        query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gt\": \"2014-11-05||/M\",\n" +
                "            \"lt\": \"2014-12-08||/d\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        parsedQuery = parseQuery(query).toQuery(createShardContext());
        assertThat(parsedQuery, instanceOf(PointRangeQuery.class));
        assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1,
                DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1),
                parsedQuery);
    }

    public void testDateRangeQueryTimezone() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"2012-01-01\",\n" +
                "            \"lte\": \"now\",\n" +
                "            \"time_zone\": \"+01:00\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        QueryShardContext context = createShardContext();
        Query parsedQuery = parseQuery(query).toQuery(context);
        assertThat(parsedQuery, instanceOf(PointRangeQuery.class));
        // TODO what else can we assert

        // A time_zone on a non-date field must be rejected at toQuery time.
        query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + INT_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"0\",\n" +
                "            \"lte\": \"100\",\n" +
                "            \"time_zone\": \"-01:00\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        QueryBuilder queryBuilder = parseQuery(query);
        expectThrows(QueryShardException.class, () -> queryBuilder.toQuery(createShardContext()));
    }

    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"include_lower\" : true,\n" +
                "      \"include_upper\" : true,\n" +
                "      \"time_zone\" : \"+01:00\",\n" +
                "      \"boost\" : 1.0\n" +
                "    }\n" +
                "  }\n" +
                "}";

        RangeQueryBuilder parsed = (RangeQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "2015-01-01 00:00:00", parsed.from());
        assertEquals(json, "now", parsed.to());
    }

    public void testNamedQueryParsing() throws IOException {
        String json =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"boost\" : 1.0,\n" +
                "      \"_name\" : \"my_range\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        assertNotNull(parseQuery(json));

        final String deprecatedJson =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"boost\" : 1.0\n" +
                "    },\n" +
                "    \"_name\" : \"my_range\"\n" +
                "  }\n" +
                "}";

        // non strict parsing should accept "_name" on top level
        // BUGFIX: previously this parsed `json` (which has no top-level _name),
        // so the lenient path was never exercised.
        assertNotNull(parseQuery(deprecatedJson, ParseFieldMatcher.EMPTY));

        // with strict parsing, ParseField will throw exception
        IllegalArgumentException e =
                expectThrows(IllegalArgumentException.class, () -> parseQuery(deprecatedJson, ParseFieldMatcher.STRICT));
        assertEquals("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]",
                e.getMessage());
    }

    public void testRewriteDateToMatchAll() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.WITHIN;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        // WITHIN rewrites to an open range (all docs of the field match).
        assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
        RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
        assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
        assertThat(rewrittenRange.from(), equalTo(null));
        assertThat(rewrittenRange.to(), equalTo(null));
    }

    public void testRewriteDateToMatchNone() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.DISJOINT;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class));
    }

    public void testRewriteDateToSame() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    public void testRewriteOpenBoundsToSame() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    public void testParseFailsWithMultipleFields() throws IOException {
        String json =
                "{\n" +
                "    \"range\": {\n" +
                "      \"age\": {\n" +
                "        \"gte\": 30,\n" +
                "        \"lte\": 40\n" +
                "      },\n" +
                "      \"price\": {\n" +
                "        \"gte\": 10,\n" +
                "        \"lte\": 30\n" +
                "      }\n" +
                "    }\n" +
                "  }";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage());
    }

    public void testParseFailsWithMultipleFieldsWhenOneIsDate() throws IOException {
        String json =
                "{\n" +
                "    \"range\": {\n" +
                "      \"age\": {\n" +
                "        \"gte\": 30,\n" +
                "        \"lte\": 40\n" +
                "      },\n" +
                "      \"" + DATE_FIELD_NAME + "\": {\n" +
                "        \"gte\": \"2016-09-13 05:01:14\"\n" +
                "      }\n" +
                "    }\n" +
                "  }";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[range] query doesn't support multiple fields, found [age] and [" + DATE_FIELD_NAME + "]", e.getMessage());
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.identity.implementation;

import com.azure.core.credential.AccessToken;
import com.azure.core.credential.TokenRequestContext;
import com.azure.identity.util.TestUtils;
import com.microsoft.aad.msal4j.AsymmetricKeyCredential;
import com.microsoft.aad.msal4j.ClientCredentialParameters;
import com.microsoft.aad.msal4j.ClientSecret;
import com.microsoft.aad.msal4j.ConfidentialClientApplication;
import com.microsoft.aad.msal4j.DeviceCodeFlowParameters;
import com.microsoft.aad.msal4j.MsalServiceException;
import com.microsoft.aad.msal4j.PublicClientApplication;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;

/**
 * Unit tests for {@link IdentityClient}. The MSAL application objects and
 * their builders are replaced with PowerMock constructor mocks so that no
 * network traffic to Azure AD happens; each test only verifies the parameters
 * the client passes into MSAL and the token it hands back to the caller.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({ ConfidentialClientApplication.class, ConfidentialClientApplication.Builder.class, PublicClientApplication.class, PublicClientApplication.Builder.class, IdentityClient.class })
@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*"})
public class IdentityClientTests {

    // NOTE(review): RANDOM is never read by any of the visible tests; kept as-is.
    private static final Random RANDOM = new Random();
    private final String tenantId = "contoso.com";
    // Fresh client id per test-class instance; the mocks below validate against it.
    private final String clientId = UUID.randomUUID().toString();

    /** Client-secret flow returns the mocked token when the secret matches. */
    @Test
    public void testValidSecret() throws Exception {
        // setup
        String secret = "secret";
        String accessToken = "token";
        TokenRequestContext request = new TokenRequestContext().addScopes("https://management.azure.com");
        OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);

        // mock
        mockForClientSecret(secret, request, accessToken, expiresOn);

        // test
        IdentityClient client = new IdentityClientBuilder().tenantId(tenantId).clientId(clientId).build();
        AccessToken token = client.authenticateWithClientSecret(secret, request).block();
        Assert.assertEquals(accessToken, token.getToken());
        // Compare only the seconds component to avoid sub-second flakiness.
        Assert.assertEquals(expiresOn.getSecond(), token.getExpiresAt().getSecond());
    }

    /**
     * Client-secret flow with a wrong secret: the mocked Builder constructor
     * (see mockForClientSecret) throws, which must surface to the caller.
     */
    @Test
    public void testInvalidSecret() throws Exception {
        // setup
        String secret = "secret";
        String accessToken = "token";
        TokenRequestContext request = new TokenRequestContext().addScopes("https://management.azure.com");
        OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);

        // mock
        mockForClientSecret(secret, request, accessToken, expiresOn);

        // test
        try {
            IdentityClient client = new IdentityClientBuilder().tenantId(tenantId).clientId(clientId).build();
            client.authenticateWithClientSecret("bad secret", request).block();
            fail();
        } catch (MsalServiceException e) {
            Assert.assertEquals("Invalid clientSecret", e.getMessage());
        }
    }

    /** PFX certificate flow succeeds with the bundled test keystore and its password. */
    @Test
    public void testValidCertificate() throws Exception {
        // setup
        String pfxPath = getClass().getResource("/keyStore.pfx").getPath();
        String accessToken = "token";
        TokenRequestContext request = new TokenRequestContext().addScopes("https://management.azure.com");
        OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);

        // mock
        mockForClientCertificate(request, accessToken, expiresOn);

        // test
        IdentityClient client = new IdentityClientBuilder().tenantId(tenantId).clientId(clientId).build();
        AccessToken token = client.authenticateWithPfxCertificate(pfxPath, "StrongPass!123", request).block();
        Assert.assertEquals(accessToken, token.getToken());
        Assert.assertEquals(expiresOn.getSecond(), token.getExpiresAt().getSecond());
    }

    /**
     * A wrong PFX password fails while loading the keystore (before MSAL is
     * ever reached), so only the exception message is asserted.
     */
    @Test
    public void testInvalidCertificatePassword() throws Exception {
        // setup
        String pfxPath = getClass().getResource("/keyStore.pfx").getPath();
        String accessToken = "token";
        TokenRequestContext request = new TokenRequestContext().addScopes("https://management.azure.com");
        OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);

        // mock
        mockForClientCertificate(request, accessToken, expiresOn);

        // test
        try {
            IdentityClient client = new IdentityClientBuilder().tenantId(tenantId).clientId(clientId).build();
            client.authenticateWithPfxCertificate(pfxPath, "BadPassword", request).block();
            fail();
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage().contains("password was incorrect"));
        }
    }

    /** Device-code flow: a no-op challenge consumer still yields the mocked token. */
    @Test
    public void testValidDeviceCodeFlow() throws Exception {
        // setup
        String accessToken = "token";
        TokenRequestContext request = new TokenRequestContext().addScopes("https://management.azure.com");
        OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);

        // mock
        mockForDeviceCodeFlow(request, accessToken, expiresOn);

        // test
        IdentityClient client = new IdentityClientBuilder().tenantId(tenantId).clientId(clientId).build();
        AccessToken token = client.authenticateWithDeviceCode(request, deviceCodeChallenge -> { /* do nothing */ }).block();
        Assert.assertEquals(accessToken, token.getToken());
        Assert.assertEquals(expiresOn.getSecond(), token.getExpiresAt().getSecond());
    }

    /****** mocks ******/

    /**
     * Mocks the confidential-client secret flow: acquireToken succeeds only for
     * the expected scope, and the Builder constructor validates clientId/secret,
     * throwing MsalServiceException on mismatch (this is what testInvalidSecret
     * relies on).
     */
    private void mockForClientSecret(String secret, TokenRequestContext request, String accessToken, OffsetDateTime expiresOn) throws Exception {
        ConfidentialClientApplication application = PowerMockito.mock(ConfidentialClientApplication.class);
        when(application.acquireToken(any(ClientCredentialParameters.class))).thenAnswer(invocation -> {
            ClientCredentialParameters argument = (ClientCredentialParameters) invocation.getArguments()[0];
            if (argument.scopes().size() == 1 && request.getScopes().get(0).equals(argument.scopes().iterator().next())) {
                return TestUtils.getMockAuthenticationResult(accessToken, expiresOn);
            } else {
                // NOTE(review): a throw inside runAsync surfaces wrapped in a
                // CompletionException when the future is joined — confirm callers expect that.
                return CompletableFuture.runAsync(() -> {
                    throw new MsalServiceException("Invalid request", "InvalidScopes");
                });
            }
        });
        ConfidentialClientApplication.Builder builder = PowerMockito.mock(ConfidentialClientApplication.Builder.class);
        when(builder.build()).thenReturn(application);
        when(builder.authority(any())).thenReturn(builder);
        // Intercept `new ConfidentialClientApplication.Builder(clientId, clientSecret)`.
        whenNew(ConfidentialClientApplication.Builder.class).withAnyArguments().thenAnswer(invocation -> {
            String cid = (String) invocation.getArguments()[0];
            ClientSecret clientSecret = (ClientSecret) invocation.getArguments()[1];
            if (!clientId.equals(cid)) {
                throw new MsalServiceException("Invalid clientId", "InvalidClientId");
            }
            if (!secret.equals(clientSecret.clientSecret())) {
                throw new MsalServiceException("Invalid clientSecret", "InvalidClientSecret");
            }
            return builder;
        });
    }

    /**
     * Mocks the certificate flow. Same shape as mockForClientSecret, but the
     * Builder constructor only checks that a non-null key credential was built
     * (the password check happens earlier, while reading the PFX file).
     */
    private void mockForClientCertificate(TokenRequestContext request, String accessToken, OffsetDateTime expiresOn) throws Exception {
        ConfidentialClientApplication application = PowerMockito.mock(ConfidentialClientApplication.class);
        when(application.acquireToken(any(ClientCredentialParameters.class))).thenAnswer(invocation -> {
            ClientCredentialParameters argument = (ClientCredentialParameters) invocation.getArguments()[0];
            if (argument.scopes().size() == 1 && request.getScopes().get(0).equals(argument.scopes().iterator().next())) {
                return TestUtils.getMockAuthenticationResult(accessToken, expiresOn);
            } else {
                return CompletableFuture.runAsync(() -> {
                    throw new MsalServiceException("Invalid request", "InvalidScopes");
                });
            }
        });
        ConfidentialClientApplication.Builder builder = PowerMockito.mock(ConfidentialClientApplication.Builder.class);
        when(builder.build()).thenReturn(application);
        when(builder.authority(any())).thenReturn(builder);
        whenNew(ConfidentialClientApplication.Builder.class).withAnyArguments().thenAnswer(invocation -> {
            String cid = (String) invocation.getArguments()[0];
            AsymmetricKeyCredential keyCredential = (AsymmetricKeyCredential) invocation.getArguments()[1];
            if (!clientId.equals(cid)) {
                throw new MsalServiceException("Invalid clientId", "InvalidClientId");
            }
            if (keyCredential == null || keyCredential.key() == null) {
                throw new MsalServiceException("Invalid clientCertificate", "InvalidClientCertificate");
            }
            return builder;
        });
    }

    /**
     * Mocks the public-client device-code flow: the answer validates both the
     * requested scope and that a device-code consumer was supplied.
     */
    private void mockForDeviceCodeFlow(TokenRequestContext request, String accessToken, OffsetDateTime expiresOn) throws Exception {
        PublicClientApplication application = PowerMockito.mock(PublicClientApplication.class);
        // NOTE(review): `cached` is set but never read by any visible test.
        AtomicBoolean cached = new AtomicBoolean(false);
        when(application.acquireToken(any(DeviceCodeFlowParameters.class))).thenAnswer(invocation -> {
            DeviceCodeFlowParameters argument = (DeviceCodeFlowParameters) invocation.getArguments()[0];
            if (argument.scopes().size() != 1 || !request.getScopes().get(0).equals(argument.scopes().iterator().next())) {
                return CompletableFuture.runAsync(() -> {
                    throw new MsalServiceException("Invalid request", "InvalidScopes");
                });
            }
            if (argument.deviceCodeConsumer() == null) {
                return CompletableFuture.runAsync(() -> {
                    throw new MsalServiceException("Invalid device code consumer", "InvalidDeviceCodeConsumer");
                });
            }
            cached.set(true);
            return TestUtils.getMockAuthenticationResult(accessToken, expiresOn);
        });
        PublicClientApplication.Builder builder = PowerMockito.mock(PublicClientApplication.Builder.class);
        when(builder.build()).thenReturn(application);
        when(builder.authority(any())).thenReturn(builder);
        // Unlike the confidential mocks, this one matches the exact clientId argument.
        whenNew(PublicClientApplication.Builder.class).withArguments(clientId).thenReturn(builder);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.plan.nodes.exec.common;

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.api.java.typeutils.InputTypeConfigurable;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.OutputFormatSinkFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.operators.SimpleOperatorFactory;
import org.apache.flink.streaming.api.operators.StreamFilter;
import org.apache.flink.streaming.api.transformations.LegacySinkTransformation;
import org.apache.flink.streaming.api.transformations.OneInputTransformation;
import org.apache.flink.streaming.api.transformations.PartitionTransformation;
import org.apache.flink.streaming.api.transformations.SinkTransformation;
import org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.ParallelismProvider;
import org.apache.flink.table.connector.sink.DataStreamSinkProvider;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.OutputFormatProvider;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.connector.sink.SinkProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.planner.connectors.TransformationSinkProvider;
import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase;
import org.apache.flink.table.planner.plan.nodes.exec.InputProperty;
import org.apache.flink.table.planner.plan.nodes.exec.MultipleTransformationTranslator;
import org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec;
import org.apache.flink.table.planner.plan.utils.KeySelectorUtil;
import org.apache.flink.table.runtime.connector.sink.SinkRuntimeProviderContext;
import org.apache.flink.table.runtime.keyselector.RowDataKeySelector;
import org.apache.flink.table.runtime.operators.sink.SinkNotNullEnforcer;
import org.apache.flink.table.runtime.operators.sink.SinkOperator;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.RowKind;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty;

import javax.annotation.Nullable;

import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.apache.flink.util.Preconditions.checkArgument;

/**
 * Base {@link ExecNode} to write data to an external sink defined by a {@link DynamicTableSink}.
 *
 * <p>The pipeline built by {@link #createSinkTransformation} is:
 * input -> (NotNullEnforcer) -> (keyBy on primary key) -> sink, where the two middle
 * stages are inserted only when needed.
 */
public abstract class CommonExecSink extends ExecNodeBase<Object>
        implements MultipleTransformationTranslator<Object> {

    public static final String FIELD_NAME_DYNAMIC_TABLE_SINK = "dynamicTableSink";

    // Serialized into the exec-node JSON plan; describes the target table and sink.
    @JsonProperty(FIELD_NAME_DYNAMIC_TABLE_SINK)
    protected final DynamicTableSinkSpec tableSinkSpec;

    // Changelog mode of the input; decides whether a keyBy is required when the
    // sink parallelism differs from the input parallelism (see applyKeyByForDifferentParallelism).
    @JsonIgnore private final ChangelogMode changelogMode;

    // Whether the job is bounded (batch); forwarded into the SinkRuntimeProviderContext.
    @JsonIgnore private final boolean isBounded;

    protected CommonExecSink(
            DynamicTableSinkSpec tableSinkSpec,
            ChangelogMode changelogMode,
            boolean isBounded,
            int id,
            List<InputProperty> inputProperties,
            LogicalType outputType,
            String description) {
        super(id, inputProperties, outputType, description);
        this.tableSinkSpec = tableSinkSpec;
        this.changelogMode = changelogMode;
        this.isBounded = isBounded;
    }

    public DynamicTableSinkSpec getTableSinkSpec() {
        return tableSinkSpec;
    }

    /**
     * Builds the sink part of the pipeline for the given input transformation,
     * dispatching on the connector's {@link DynamicTableSink.SinkRuntimeProvider} flavor:
     * DataStreamSinkProvider, TransformationSinkProvider, SinkFunctionProvider,
     * OutputFormatProvider or SinkProvider.
     *
     * @param env the stream execution environment
     * @param tableConfig configuration (used for the NOT NULL enforcer option)
     * @param inputTransform the upstream transformation producing RowData
     * @param rowtimeFieldIndex index of the rowtime attribute, or a sentinel when absent
     *     (value semantics defined by the SinkOperator; not visible here)
     * @return the final sink transformation
     */
    @SuppressWarnings("unchecked")
    protected Transformation<Object> createSinkTransformation(
            StreamExecutionEnvironment env,
            TableConfig tableConfig,
            Transformation<RowData> inputTransform,
            int rowtimeFieldIndex) {
        final DynamicTableSink tableSink = tableSinkSpec.getTableSink();
        final DynamicTableSink.SinkRuntimeProvider runtimeProvider =
                tableSink.getSinkRuntimeProvider(new SinkRuntimeProviderContext(isBounded));
        final ResolvedSchema schema = tableSinkSpec.getCatalogTable().getResolvedSchema();
        final RowType physicalRowType = getPhysicalRowType(schema);
        // NOT NULL enforcement runs before any provider-specific handling.
        inputTransform = applyNotNullEnforcer(tableConfig, physicalRowType, inputTransform);
        if (runtimeProvider instanceof DataStreamSinkProvider) {
            if (runtimeProvider instanceof ParallelismProvider) {
                throw new TableException(
                        "`DataStreamSinkProvider` is not allowed to work with"
                                + " `ParallelismProvider`, "
                                + "please see document of `ParallelismProvider`");
            }
            // The connector takes over: it consumes the DataStream and returns its own sink.
            final DataStream<RowData> dataStream = new DataStream<>(env, inputTransform);
            final DataStreamSinkProvider provider = (DataStreamSinkProvider) runtimeProvider;
            return provider.consumeDataStream(dataStream).getTransformation();
        } else if (runtimeProvider instanceof TransformationSinkProvider) {
            final TransformationSinkProvider provider =
                    (TransformationSinkProvider) runtimeProvider;
            return (Transformation<Object>)
                    provider.createTransformation(
                            TransformationSinkProvider.Context.of(
                                    inputTransform, rowtimeFieldIndex));
        } else {
            // All remaining provider kinds must expose a parallelism.
            checkArgument(
                    runtimeProvider instanceof ParallelismProvider,
                    "%s should implement ParallelismProvider interface.",
                    runtimeProvider.getClass().getName());
            final int inputParallelism = inputTransform.getParallelism();
            final int sinkParallelism =
                    deriveSinkParallelism((ParallelismProvider) runtimeProvider, inputParallelism);

            // apply keyBy partition transformation if needed
            inputTransform =
                    applyKeyByForDifferentParallelism(
                            physicalRowType,
                            schema.getPrimaryKey().orElse(null),
                            inputTransform,
                            inputParallelism,
                            sinkParallelism);

            final SinkFunction<RowData> sinkFunction;
            if (runtimeProvider instanceof SinkFunctionProvider) {
                sinkFunction = ((SinkFunctionProvider) runtimeProvider).createSinkFunction();
                return createSinkFunctionTransformation(
                        sinkFunction, env, inputTransform, rowtimeFieldIndex, sinkParallelism);
            } else if (runtimeProvider instanceof OutputFormatProvider) {
                // OutputFormats are wrapped into a SinkFunction and share the same path.
                OutputFormat<RowData> outputFormat =
                        ((OutputFormatProvider) runtimeProvider).createOutputFormat();
                sinkFunction = new OutputFormatSinkFunction<>(outputFormat);
                return createSinkFunctionTransformation(
                        sinkFunction, env, inputTransform, rowtimeFieldIndex, sinkParallelism);
            } else if (runtimeProvider instanceof SinkProvider) {
                // New unified Sink API.
                return new SinkTransformation<>(
                        inputTransform,
                        ((SinkProvider) runtimeProvider).createSink(),
                        getDescription(),
                        sinkParallelism);
            } else {
                // Unreachable: every known ParallelismProvider subtype is handled above.
                throw new TableException("This should not happen.");
            }
        }
    }

    /**
     * Apply an operator to filter or report error to process not-null values for not-null fields.
     * Returns the input unchanged when the physical row type has no NOT NULL columns.
     */
    private Transformation<RowData> applyNotNullEnforcer(
            TableConfig config, RowType physicalRowType, Transformation<RowData> inputTransform) {
        final ExecutionConfigOptions.NotNullEnforcer notNullEnforcer =
                config.getConfiguration()
                        .get(ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER);
        final int[] notNullFieldIndices = getNotNullFieldIndices(physicalRowType);
        final String[] fieldNames = physicalRowType.getFieldNames().toArray(new String[0]);

        if (notNullFieldIndices.length > 0) {
            // The enforcer is a filter: depending on config it drops violating rows or raises.
            final SinkNotNullEnforcer enforcer =
                    new SinkNotNullEnforcer(notNullEnforcer, notNullFieldIndices, fieldNames);
            final List<String> notNullFieldNames =
                    Arrays.stream(notNullFieldIndices)
                            .mapToObj(idx -> fieldNames[idx])
                            .collect(Collectors.toList());
            final String operatorName =
                    String.format(
                            "NotNullEnforcer(fields=[%s])", String.join(", ", notNullFieldNames));
            return new OneInputTransformation<>(
                    inputTransform,
                    operatorName,
                    new StreamFilter<>(enforcer),
                    getInputTypeInfo(),
                    inputTransform.getParallelism());
        } else {
            // there are no not-null fields, just skip adding the enforcer operator
            return inputTransform;
        }
    }

    /** Indices of all columns declared NOT NULL in the physical row type. */
    private int[] getNotNullFieldIndices(RowType physicalType) {
        return IntStream.range(0, physicalType.getFieldCount())
                .filter(pos -> !physicalType.getTypeAt(pos).isNullable())
                .toArray();
    }

    /**
     * Returns the parallelism of sink operator, it assumes the sink runtime provider implements
     * {@link ParallelismProvider}. It returns parallelism defined in {@link ParallelismProvider} if
     * the parallelism is provided, otherwise it uses parallelism of input transformation.
     *
     * @throws TableException if the configured parallelism is zero or negative
     */
    private int deriveSinkParallelism(
            ParallelismProvider parallelismProvider, int inputParallelism) {
        final Optional<Integer> parallelismOptional = parallelismProvider.getParallelism();
        if (parallelismOptional.isPresent()) {
            int sinkParallelism = parallelismOptional.get();
            if (sinkParallelism <= 0) {
                // NOTE(review): message wording ("less than zero or equal to zero") is awkward
                // but is kept byte-identical; callers/tests may match on it.
                throw new TableException(
                        String.format(
                                "Table: %s configured sink parallelism: "
                                        + "%s should not be less than zero or equal to zero",
                                tableSinkSpec.getObjectIdentifier().asSummaryString(),
                                sinkParallelism));
            }
            return sinkParallelism;
        } else {
            // use input parallelism if not specified
            return inputParallelism;
        }
    }

    /**
     * Apply a keyBy partition transformation if the parallelism of sink operator and input operator
     * is different and sink changelog-mode is not insert-only. This is used to guarantee the strict
     * ordering of changelog messages.
     *
     * @throws TableException when a repartition is required but the table has no primary key
     *     (without a key, changelog ordering per row cannot be preserved)
     */
    private Transformation<RowData> applyKeyByForDifferentParallelism(
            RowType sinkRowType,
            @Nullable UniqueConstraint primaryKey,
            Transformation<RowData> inputTransform,
            int inputParallelism,
            int sinkParallelism) {
        final int[] primaryKeys = getPrimaryKeyIndices(sinkRowType, primaryKey);
        if (inputParallelism == sinkParallelism || changelogMode.containsOnly(RowKind.INSERT)) {
            // if the inputParallelism is equals to the parallelism or insert-only mode, do nothing.
            return inputTransform;
        } else if (primaryKeys.length == 0) {
            throw new TableException(
                    String.format(
                            "Table: %s configured sink parallelism is: %s, while the input parallelism is: "
                                    + "%s. Since configured parallelism is different from input parallelism and the changelog mode "
                                    + "contains [%s], which is not INSERT_ONLY mode, primary key is required but no primary key is found",
                            tableSinkSpec.getObjectIdentifier().asSummaryString(),
                            sinkParallelism,
                            inputParallelism,
                            changelogMode.getContainedKinds().stream()
                                    .map(Enum::toString)
                                    .collect(Collectors.joining(","))));
        } else {
            // keyBy before sink
            // Hash-partition on the primary key so all changes for one key land on the
            // same sink subtask, preserving per-key changelog order.
            final RowDataKeySelector selector =
                    KeySelectorUtil.getRowDataSelector(primaryKeys, getInputTypeInfo());
            final KeyGroupStreamPartitioner<RowData, RowData> partitioner =
                    new KeyGroupStreamPartitioner<>(
                            selector, KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM);
            Transformation<RowData> partitionedTransform =
                    new PartitionTransformation<>(inputTransform, partitioner);
            partitionedTransform.setParallelism(sinkParallelism);
            return partitionedTransform;
        }
    }

    /**
     * Column indices of the primary key in the sink row type; empty when there is no key.
     * NOTE(review): RowType#getFieldIndex may return -1 for an unknown column name —
     * presumably the constraint is already validated upstream; confirm.
     */
    private int[] getPrimaryKeyIndices(RowType sinkRowType, @Nullable UniqueConstraint primaryKey) {
        if (primaryKey == null) {
            return new int[0];
        }
        return primaryKey.getColumns().stream().mapToInt(sinkRowType::getFieldIndex).toArray();
    }

    /**
     * Wraps a SinkFunction into a legacy sink transformation, forwarding the
     * input type to InputTypeConfigurable functions (e.g. OutputFormat wrappers).
     */
    private Transformation<Object> createSinkFunctionTransformation(
            SinkFunction<RowData> sinkFunction,
            StreamExecutionEnvironment env,
            Transformation<RowData> inputTransformation,
            int rowtimeFieldIndex,
            int sinkParallelism) {
        final SinkOperator operator = new SinkOperator(env.clean(sinkFunction), rowtimeFieldIndex);

        if (sinkFunction instanceof InputTypeConfigurable) {
            ((InputTypeConfigurable) sinkFunction)
                    .setInputType(getInputTypeInfo(), env.getConfig());
        }

        return new LegacySinkTransformation<>(
                inputTransformation,
                getDescription(),
                SimpleOperatorFactory.of(operator),
                sinkParallelism);
    }

    /** Type information of the single input edge's row type. */
    private InternalTypeInfo<RowData> getInputTypeInfo() {
        return InternalTypeInfo.of(getInputEdges().get(0).getOutputType());
    }

    /** Physical (non-computed columns only) row type of the resolved schema. */
    private RowType getPhysicalRowType(ResolvedSchema schema) {
        return (RowType) schema.toPhysicalRowDataType().getLogicalType();
    }
}
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * you may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.impl; import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.cxf.headers.Header; import org.apache.cxf.helpers.CastUtils; import org.apache.cxf.jaxws.context.WrappedMessageContext; import org.apache.cxf.message.Message; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.wso2.carbon.certificate.mgt.core.exception.KeystoreException; import org.wso2.carbon.certificate.mgt.core.service.CertificateManagementServiceImpl; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.device.mgt.common.DeviceManagementException; import org.wso2.carbon.device.mgt.common.configuration.mgt.ConfigurationEntry; import org.wso2.carbon.device.mgt.mobile.windows.api.common.PluginConstants; import org.wso2.carbon.device.mgt.mobile.windows.api.common.exceptions.CertificateGenerationException; import org.wso2.carbon.device.mgt.mobile.windows.api.common.exceptions.SyncmlMessageFormatException; import org.wso2.carbon.device.mgt.mobile.windows.api.common.exceptions.WAPProvisioningException; import 
org.wso2.carbon.device.mgt.mobile.windows.api.common.exceptions.WindowsDeviceEnrolmentException; import org.wso2.carbon.device.mgt.mobile.windows.api.common.util.DeviceUtil; import org.wso2.carbon.device.mgt.mobile.windows.api.common.util.WindowsAPIUtils; import org.wso2.carbon.device.mgt.mobile.windows.api.operations.util.SyncmlCredentialUtil; import org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.CertificateEnrollmentService; import org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.beans.AdditionalContext; import org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.beans.BinarySecurityToken; import org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.beans.RequestSecurityTokenResponse; import org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.beans.RequestedSecurityToken; import org.wso2.carbon.device.mgt.mobile.windows.impl.dto.MobileCacheEntry; import org.xml.sax.SAXException; import javax.annotation.Resource; import javax.jws.WebService; import javax.servlet.ServletContext; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.ws.BindingType; import javax.xml.ws.Holder; import javax.xml.ws.WebServiceContext; import javax.xml.ws.handler.MessageContext; import javax.xml.ws.soap.Addressing; import javax.xml.ws.soap.SOAPBinding; import java.io.File; import java.io.IOException; import java.io.StringWriter; import java.security.cert.CertificateEncodingException; import java.security.cert.X509Certificate; import java.util.List; /** * Implementation class of CertificateEnrollmentService interface. This class implements MS-WSTEP * protocol. 
*/
@WebService(endpointInterface = PluginConstants.CERTIFICATE_ENROLLMENT_SERVICE_ENDPOINT,
        targetNamespace = PluginConstants.DEVICE_ENROLLMENT_SERVICE_TARGET_NAMESPACE)
@Addressing(enabled = true, required = true)
@BindingType(value = SOAPBinding.SOAP12HTTP_BINDING)
public class CertificateEnrollmentServiceImpl implements CertificateEnrollmentService {

    private static Log log = LogFactory.getLog(
            org.wso2.carbon.device.mgt.mobile.windows.api.services.wstep.impl.CertificateEnrollmentServiceImpl.class);
    // CA certificate used to sign the device's CSR; loaded lazily per request.
    private X509Certificate rootCACertificate;
    // Device check-in frequency, read from tenant configuration (or a default).
    private String pollingFrequency;
    // Next SyncML provisioning endpoint, derived from the WS-Addressing "To" header.
    private String provisioningURL;
    // Enrollment domain parsed out of the "To" header.
    private String domain;
    @Resource
    private WebServiceContext context;

    /**
     * This method implements MS-WSTEP for Certificate Enrollment Service.
     *
     * @param tokenType           - Device Enrolment Token type is received via device
     * @param requestType         - WS-Trust request type
     * @param binarySecurityToken - CSR from device
     * @param additionalContext   - Device type and OS version is received
     * @param response            - Response will include wap-provisioning xml
     * @throws WindowsDeviceEnrolmentException on certificate generation, wap-provisioning
     *                                         or tenant-configuration failures
     */
    @Override
    public void requestSecurityToken(String tokenType, String requestType,
                                     String binarySecurityToken,
                                     AdditionalContext additionalContext,
                                     Holder<RequestSecurityTokenResponse> response)
            throws WindowsDeviceEnrolmentException {
        String headerBinarySecurityToken = null;
        String headerTo = null;
        String encodedWap;
        List<Header> headers = getHeaders();
        // Pull the BinarySecurityToken and the WS-Addressing "To" value out of the SOAP headers.
        for (Header headerElement : headers) {
            String nodeName = headerElement.getName().getLocalPart();
            if (PluginConstants.SECURITY.equals(nodeName)) {
                Element element = (Element) headerElement.getObject();
                headerBinarySecurityToken = element.getFirstChild().getNextSibling().getFirstChild().getTextContent();
            }
            if (PluginConstants.TO.equals(nodeName)) {
                Element toElement = (Element) headerElement.getObject();
                headerTo = toElement.getFirstChild().getTextContent();
            }
        }
        // NOTE(review): if no "To" header is present, headerTo stays null and the
        // split below throws NPE instead of a descriptive error — confirm headers
        // are guaranteed by @Addressing(required = true).
        String[] splitEmail = headerTo.split("(/ENROLLMENTSERVER)");
        String email = splitEmail[PluginConstants.CertificateEnrolment.EMAIL_SEGMENT];
        String[] splitDomain = email.split("(EnterpriseEnrollment.)");
        domain = splitDomain[PluginConstants.CertificateEnrolment.DOMAIN_SEGMENT];
        provisioningURL = PluginConstants.CertificateEnrolment.ENROLL_SUBDOMAIN + domain +
                PluginConstants.CertificateEnrolment.SYNCML_PROVISIONING_SERVICE_URL;
        List<ConfigurationEntry> tenantConfigurations;
        try {
            if ((tenantConfigurations = WindowsAPIUtils.getTenantConfigurationData()) != null) {
                // NOTE(review): last-entry-wins — each non-matching entry resets
                // pollingFrequency to the default, so a configured NOTIFIER_FREQUENCY is
                // overwritten unless it happens to be the last entry; verify intent.
                for (ConfigurationEntry configurationEntry : tenantConfigurations) {
                    if ((PluginConstants.TenantConfigProperties.NOTIFIER_FREQUENCY.equals(
                            configurationEntry.getName()))) {
                        pollingFrequency = configurationEntry.getValue().toString();
                    } else {
                        pollingFrequency = PluginConstants.TenantConfigProperties.DEFAULT_FREQUENCY;
                    }
                }
            } else {
                pollingFrequency = PluginConstants.TenantConfigProperties.DEFAULT_FREQUENCY;
                String msg = "Tenant configurations are not initialized yet.";
                log.error(msg);
            }
            // The wap-provisioning template file is stashed in the servlet context at startup.
            ServletContext ctx = (ServletContext) context.getMessageContext().
                    get(MessageContext.SERVLET_CONTEXT);
            File wapProvisioningFile = (File) ctx.getAttribute(PluginConstants.CONTEXT_WAP_PROVISIONING_FILE);
            if (log.isDebugEnabled()) {
                log.debug("Received CSR from Device:" + binarySecurityToken);
            }
            String wapProvisioningFilePath = wapProvisioningFile.getPath();
            RequestSecurityTokenResponse requestSecurityTokenResponse = new RequestSecurityTokenResponse();
            requestSecurityTokenResponse.setTokenType(PluginConstants.CertificateEnrolment.TOKEN_TYPE);
            // Fill the template with certificates etc. and get it back base64-encoded.
            encodedWap = prepareWapProvisioningXML(binarySecurityToken, wapProvisioningFilePath,
                    headerBinarySecurityToken);
            RequestedSecurityToken requestedSecurityToken = new RequestedSecurityToken();
            BinarySecurityToken binarySecToken = new BinarySecurityToken();
            binarySecToken.setValueType(PluginConstants.CertificateEnrolment.VALUE_TYPE);
            binarySecToken.setEncodingType(PluginConstants.CertificateEnrolment.ENCODING_TYPE);
            binarySecToken.setToken(encodedWap);
            requestedSecurityToken.setBinarySecurityToken(binarySecToken);
            requestSecurityTokenResponse.setRequestedSecurityToken(requestedSecurityToken);
            requestSecurityTokenResponse.setRequestID(PluginConstants.CertificateEnrolment.REQUEST_ID);
            response.value = requestSecurityTokenResponse;
        } catch (CertificateGenerationException e) {
            String msg = "Problem occurred while generating certificate.";
            log.error(msg, e);
            throw new WindowsDeviceEnrolmentException(msg, e);
        } catch (WAPProvisioningException e) {
            String msg = "Problem occurred while generating wap-provisioning file.";
            log.error(msg, e);
            throw new WindowsDeviceEnrolmentException(msg, e);
        } catch (DeviceManagementException e) {
            String msg = "Error occurred while getting tenant configurations.";
            log.error(msg);
            throw new WindowsDeviceEnrolmentException(msg, e);
        } finally {
            // NOTE(review): no matching startTenantFlow() is visible in this method —
            // presumably it is begun by an interceptor/helper; confirm before changing.
            PrivilegedCarbonContext.endTenantFlow();
        }
    }

    /**
     * Method used to Convert the Document object into a String.
     * @param document Wap provisioning XML document to serialize
     * @return String representation of the wap provisioning XML document
     * @throws TransformerException if the DOM-to-text transformation fails
     */
    private String convertDocumentToString(Document document) throws TransformerException {
        // NOTE(review): the local variable deliberately (and confusingly) shares the
        // DOMSource type name; kept byte-identical here.
        DOMSource DOMSource = new DOMSource(document);
        StringWriter stringWriter = new StringWriter();
        StreamResult streamResult = new StreamResult(stringWriter);
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        Transformer transformer = transformerFactory.newTransformer();
        transformer.transform(DOMSource, streamResult);
        return stringWriter.toString();
    }

    /**
     * Prepares the wap-provisioning payload sent back to the enrolling device: injects the CA
     * certificate, the certificate signed from the device CSR, the enrolment domain, the next
     * provisioning-service URL, one-time credentials, and the polling frequency into the
     * wap-provisioning XML template, then returns the whole document base64 encoded.
     *
     * <p>NOTE(review): the template is edited by positional index into the {@code parm} element
     * list — assumes the ordering of the template file matches the positions declared in
     * {@code PluginConstants.CertificateEnrolment}; verify when the template changes.
     *
     * @param binarySecurityToken     CSR from device (base64 encoded)
     * @param wapProvisioningFilePath file path of the wap-provisioning template file
     * @param headerBst               binary security token from the SOAP header, used as the
     *                                cache key for the enrolling user's token entry
     * @return base64 encoded final wap-provisioning file as a String
     * @throws CertificateGenerationException if the CA certificate cannot be obtained
     * @throws WAPProvisioningException if the template cannot be parsed or serialized
     * @throws WindowsDeviceEnrolmentException if certificate encoding or credential hashing fails
     */
    private String prepareWapProvisioningXML(String binarySecurityToken,
                                             String wapProvisioningFilePath,
                                             String headerBst)
            throws CertificateGenerationException, WAPProvisioningException,
                   WindowsDeviceEnrolmentException {
        String rootCertEncodedString;
        String signedCertEncodedString;
        X509Certificate signedCertificate;
        String provisioningXmlString;
        CertificateManagementServiceImpl certMgtServiceImpl =
                CertificateManagementServiceImpl.getInstance();
        Base64 base64Encoder = new Base64();
        try {
            // rootCACertificate is an instance field (declared above this chunk); it is cached
            // here so the fingerprint computation below reuses the same certificate.
            rootCACertificate = (X509Certificate) certMgtServiceImpl.getCACertificate();
            rootCertEncodedString = base64Encoder.encodeAsString(rootCACertificate.getEncoded());
            // Sign the device's CSR to produce the client certificate.
            signedCertificate = certMgtServiceImpl.getSignedCertificateFromCSR(binarySecurityToken);
            signedCertEncodedString = base64Encoder.encodeAsString(signedCertificate.getEncoded());
            DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
            // Secure processing limits entity expansion; the template is a local server-side
            // file, not device input.
            domFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            DocumentBuilder builder = domFactory.newDocumentBuilder();
            Document document = builder.parse(wapProvisioningFilePath);
            NodeList wapParm = document.getElementsByTagName(PluginConstants.CertificateEnrolment.PARM);
            Node caCertificatePosition =
                    wapParm.item(PluginConstants.CertificateEnrolment.CA_CERTIFICATE_POSITION);

            //Adding SHA1 CA certificate finger print to wap-provisioning xml.
            caCertificatePosition.getParentNode().getAttributes().getNamedItem(PluginConstants.
                    CertificateEnrolment.TYPE).setTextContent(String.valueOf(
                    DigestUtils.sha1Hex(rootCACertificate.getEncoded())).toUpperCase());
            //Adding encoded CA certificate to wap-provisioning file after removing new line
            // characters (base64 line breaks would corrupt the attribute value).
            NamedNodeMap rootCertAttributes = caCertificatePosition.getAttributes();
            Node rootCertNode =
                    rootCertAttributes.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            rootCertEncodedString = rootCertEncodedString.replaceAll("\n", "");
            rootCertNode.setTextContent(rootCertEncodedString);
            if (log.isDebugEnabled()) {
                log.debug("Root certificate: " + rootCertEncodedString);
            }

            Node signedCertificatePosition = wapParm.item(PluginConstants.CertificateEnrolment.
                    SIGNED_CERTIFICATE_POSITION);
            //Adding SHA1 signed certificate finger print to wap-provisioning xml.
            signedCertificatePosition.getParentNode().getAttributes().getNamedItem(PluginConstants.
                    CertificateEnrolment.TYPE).setTextContent(String.valueOf(
                    DigestUtils.sha1Hex(signedCertificate.getEncoded())).toUpperCase());
            //Adding encoded signed certificate to wap-provisioning file after removing new line
            // characters.
            NamedNodeMap clientCertAttributes = signedCertificatePosition.getAttributes();
            Node clientEncodedNode =
                    clientCertAttributes.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            signedCertEncodedString = signedCertEncodedString.replaceAll("\n", "");
            clientEncodedNode.setTextContent(signedCertEncodedString);
            if (log.isDebugEnabled()) {
                log.debug("Signed certificate: " + signedCertEncodedString);
            }

            //Adding domainName to wap-provisioning xml.
            Node domainPosition = wapParm.item(PluginConstants.CertificateEnrolment.DOMAIN_POSITION);
            NamedNodeMap domainAttribute = domainPosition.getAttributes();
            Node domainNode = domainAttribute.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            domainNode.setTextContent(domain);

            //Adding Next provisioning service URL to wap-provisioning xml.
            Node syncmlServicePosition = wapParm.item(PluginConstants.CertificateEnrolment.
                    SYNCML_PROVISIONING_ADDR_POSITION);
            NamedNodeMap syncmlServiceAttribute = syncmlServicePosition.getAttributes();
            Node syncmlServiceNode =
                    syncmlServiceAttribute.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            syncmlServiceNode.setTextContent(provisioningURL);

            // Adding user name auth token to wap-provisioning xml.
            Node userNameAuthPosition =
                    wapParm.item(PluginConstants.CertificateEnrolment.APPAUTH_USERNAME_POSITION);
            NamedNodeMap appServerAttribute = userNameAuthPosition.getAttributes();
            Node authNameNode =
                    appServerAttribute.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            MobileCacheEntry cacheEntry = DeviceUtil.getTokenEntry(headerBst);
            String userName = cacheEntry.getUsername();
            authNameNode.setTextContent(cacheEntry.getUsername());
            // One-time token: the cache entry is consumed as soon as the username is read.
            DeviceUtil.removeTokenEntry(headerBst);

            String password = DeviceUtil.generateRandomToken();
            Node passwordAuthPosition =
                    wapParm.item(PluginConstants.CertificateEnrolment.APPAUTH_PASSWORD_POSITION);
            NamedNodeMap appSrvPasswordAttribute = passwordAuthPosition.getAttributes();
            Node authPasswordNode =
                    appSrvPasswordAttribute.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            authPasswordNode.setTextContent(password);
            // Persist the challenge so later syncml requests from this user can be verified.
            String requestSecurityTokenResponse = SyncmlCredentialUtil.generateRST(userName, password);
            DeviceUtil.persistChallengeToken(requestSecurityTokenResponse, null, userName);

            // Get device polling frequency from the tenant Configurations.
            Node numberOfFirstRetries =
                    wapParm.item(PluginConstants.CertificateEnrolment.POLLING_FREQUENCY_POSITION);
            NamedNodeMap pollingAttributes = numberOfFirstRetries.getAttributes();
            Node pollValue = pollingAttributes.getNamedItem(PluginConstants.CertificateEnrolment.VALUE);
            pollValue.setTextContent(pollingFrequency);

            provisioningXmlString = convertDocumentToString(document);
        } catch (ParserConfigurationException e) {
            throw new WAPProvisioningException("Problem occurred while creating configuration request", e);
        } catch (CertificateEncodingException e) {
            throw new WindowsDeviceEnrolmentException("Error occurred while encoding certificates.", e);
        } catch (SAXException e) {
            throw new WAPProvisioningException("Error occurred while parsing wap-provisioning.xml file.", e);
        } catch (TransformerException e) {
            throw new WAPProvisioningException("Error occurred while transforming wap-provisioning.xml file.", e);
        } catch (IOException e) {
            throw new WAPProvisioningException("Error occurred while getting wap-provisioning.xml file.", e);
        } catch (SyncmlMessageFormatException e) {
            throw new WindowsDeviceEnrolmentException("Error occurred while generating password hash value.", e);
        } catch (KeystoreException e) {
            throw new CertificateGenerationException("CA certificate cannot be generated.", e);
        }
        // NOTE(review): getBytes() uses the platform default charset here; consider an explicit
        // UTF-8 charset so the encoded payload is identical across server locales.
        return base64Encoder.encodeAsString(provisioningXmlString.getBytes());
    }

    /**
     * Returns the SOAP request header contents, or {@code null} when the current message
     * context is absent or not a CXF {@code WrappedMessageContext}.
     *
     * @return list of SOAP headers, or {@code null} if unavailable
     */
    private List<Header> getHeaders() {
        MessageContext messageContext = context.getMessageContext();
        if (messageContext == null || !(messageContext instanceof WrappedMessageContext)) {
            return null;
        }
        Message message = ((WrappedMessageContext) messageContext).getWrappedMessage();
        return CastUtils.cast((List<?>) message.get(Header.HEADER_LIST));
    }
}
// Copyright 2021 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.persistence.transaction; import static google.registry.persistence.transaction.TransactionManagerFactory.assertNotReadOnlyMode; import google.registry.model.annotations.DeleteAfterMigration; import java.util.List; import java.util.Map; import javax.persistence.EntityGraph; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; import javax.persistence.FlushModeType; import javax.persistence.LockModeType; import javax.persistence.Query; import javax.persistence.StoredProcedureQuery; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaDelete; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaUpdate; import javax.persistence.metamodel.Metamodel; /** An {@link EntityManager} that throws exceptions on write actions if in read-only mode. 
*/ @DeleteAfterMigration public class ReadOnlyCheckingEntityManager implements EntityManager { private final EntityManager delegate; public ReadOnlyCheckingEntityManager(EntityManager delegate) { this.delegate = delegate; } @Override public void persist(Object entity) { assertNotReadOnlyMode(); delegate.persist(entity); } @Override public <T> T merge(T entity) { assertNotReadOnlyMode(); return delegate.merge(entity); } @Override public void remove(Object entity) { assertNotReadOnlyMode(); delegate.remove(entity); } @Override public <T> T find(Class<T> entityClass, Object primaryKey) { return delegate.find(entityClass, primaryKey); } @Override public <T> T find(Class<T> entityClass, Object primaryKey, Map<String, Object> properties) { return delegate.find(entityClass, primaryKey, properties); } @Override public <T> T find(Class<T> entityClass, Object primaryKey, LockModeType lockMode) { return delegate.find(entityClass, primaryKey, lockMode); } @Override public <T> T find( Class<T> entityClass, Object primaryKey, LockModeType lockMode, Map<String, Object> properties) { return delegate.find(entityClass, primaryKey, lockMode, properties); } @Override public <T> T getReference(Class<T> entityClass, Object primaryKey) { return delegate.getReference(entityClass, primaryKey); } @Override public void flush() { delegate.flush(); } @Override public void setFlushMode(FlushModeType flushMode) { delegate.setFlushMode(flushMode); } @Override public FlushModeType getFlushMode() { return delegate.getFlushMode(); } @Override public void lock(Object entity, LockModeType lockMode) { assertNotReadOnlyMode(); delegate.lock(entity, lockMode); } @Override public void lock(Object entity, LockModeType lockMode, Map<String, Object> properties) { assertNotReadOnlyMode(); delegate.lock(entity, lockMode, properties); } @Override public void refresh(Object entity) { delegate.refresh(entity); } @Override public void refresh(Object entity, Map<String, Object> properties) { 
delegate.refresh(entity, properties); } @Override public void refresh(Object entity, LockModeType lockMode) { delegate.refresh(entity, lockMode); } @Override public void refresh(Object entity, LockModeType lockMode, Map<String, Object> properties) { delegate.refresh(entity, lockMode, properties); } @Override public void clear() { delegate.clear(); } @Override public void detach(Object entity) { delegate.detach(entity); } @Override public boolean contains(Object entity) { return delegate.contains(entity); } @Override public LockModeType getLockMode(Object entity) { return delegate.getLockMode(entity); } @Override public void setProperty(String propertyName, Object value) { delegate.setProperty(propertyName, value); } @Override public Map<String, Object> getProperties() { return delegate.getProperties(); } @Override public ReadOnlyCheckingQuery createQuery(String qlString) { return new ReadOnlyCheckingQuery(delegate.createQuery(qlString)); } @Override public <T> TypedQuery<T> createQuery(CriteriaQuery<T> criteriaQuery) { return new ReadOnlyCheckingTypedQuery<>(delegate.createQuery(criteriaQuery)); } @Override public Query createQuery(CriteriaUpdate updateQuery) { assertNotReadOnlyMode(); return delegate.createQuery(updateQuery); } @Override public Query createQuery(CriteriaDelete deleteQuery) { assertNotReadOnlyMode(); return delegate.createQuery(deleteQuery); } @Override public <T> TypedQuery<T> createQuery(String qlString, Class<T> resultClass) { return new ReadOnlyCheckingTypedQuery<>(delegate.createQuery(qlString, resultClass)); } @Override public Query createNamedQuery(String name) { return new ReadOnlyCheckingQuery(delegate.createNamedQuery(name)); } @Override public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) { return new ReadOnlyCheckingTypedQuery<>(delegate.createNamedQuery(name, resultClass)); } @Override public ReadOnlyCheckingQuery createNativeQuery(String sqlString) { return new 
ReadOnlyCheckingQuery(delegate.createNativeQuery(sqlString)); } @Override public Query createNativeQuery(String sqlString, Class resultClass) { return new ReadOnlyCheckingQuery(delegate.createNativeQuery(sqlString, resultClass)); } @Override public Query createNativeQuery(String sqlString, String resultSetMapping) { return new ReadOnlyCheckingQuery(delegate.createNativeQuery(sqlString, resultSetMapping)); } @Override public StoredProcedureQuery createNamedStoredProcedureQuery(String name) { assertNotReadOnlyMode(); return delegate.createNamedStoredProcedureQuery(name); } @Override public StoredProcedureQuery createStoredProcedureQuery(String procedureName) { assertNotReadOnlyMode(); return delegate.createStoredProcedureQuery(procedureName); } @Override public StoredProcedureQuery createStoredProcedureQuery( String procedureName, Class... resultClasses) { assertNotReadOnlyMode(); return delegate.createStoredProcedureQuery(procedureName, resultClasses); } @Override public StoredProcedureQuery createStoredProcedureQuery( String procedureName, String... 
resultSetMappings) { assertNotReadOnlyMode(); return delegate.createStoredProcedureQuery(procedureName, resultSetMappings); } @Override public void joinTransaction() { delegate.joinTransaction(); } @Override public boolean isJoinedToTransaction() { return delegate.isJoinedToTransaction(); } @Override public <T> T unwrap(Class<T> cls) { return delegate.unwrap(cls); } @Override public Object getDelegate() { return delegate.getDelegate(); } @Override public void close() { delegate.close(); } @Override public boolean isOpen() { return delegate.isOpen(); } @Override public EntityTransaction getTransaction() { return delegate.getTransaction(); } @Override public EntityManagerFactory getEntityManagerFactory() { return delegate.getEntityManagerFactory(); } @Override public CriteriaBuilder getCriteriaBuilder() { return delegate.getCriteriaBuilder(); } @Override public Metamodel getMetamodel() { return delegate.getMetamodel(); } @Override public <T> EntityGraph<T> createEntityGraph(Class<T> rootType) { return delegate.createEntityGraph(rootType); } @Override public EntityGraph<?> createEntityGraph(String graphName) { return delegate.createEntityGraph(graphName); } @Override public EntityGraph<?> getEntityGraph(String graphName) { return delegate.getEntityGraph(graphName); } @Override public <T> List<EntityGraph<? super T>> getEntityGraphs(Class<T> entityClass) { return delegate.getEntityGraphs(entityClass); } public <T> T mergeIgnoringReadOnly(T entity) { return delegate.merge(entity); } }