gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

package h2o.jodd.typeconverter.impl;

import h2o.jodd.typeconverter.TypeConverter;
import h2o.jodd.typeconverter.TypeConverterManager;
import h2o.jodd.util.CsvUtil;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Converter that turns an arbitrary value into an array of the target
 * component type. It is not registered for a concrete type; instances are
 * created on demand, one per target component type.
 * <p>
 * Conversion rules:
 * <ul>
 * <li>{@code null} stays {@code null};</li>
 * <li>collections and iterables are converted element by element;</li>
 * <li>character sequences are first split as CSV text;</li>
 * <li>any other non-array value becomes a single-element array;</li>
 * <li>a source array is converted to the target array, element by element.</li>
 * </ul>
 */
@SuppressWarnings("unchecked")
public class ArrayConverter<T> implements TypeConverter<T[]> {

	public static final char[] NUMBER_DELIMITERS = new char[] {',', ';', '\n'};

	protected final TypeConverterManager typeConverterManager;
	protected final Class<T> targetComponentType;

	public ArrayConverter(final TypeConverterManager typeConverterManager, final Class<T> targetComponentType) {
		this.typeConverterManager = typeConverterManager;
		this.targetComponentType = targetComponentType;
	}

	/**
	 * Entry point: dispatches to array or non-array conversion.
	 * Returns {@code null} for a {@code null} input.
	 */
	@Override
	public T[] convert(final Object value) {
		if (value == null) {
			return null;
		}
		return value.getClass().isArray()
				? convertArrayToArray(value)
				: convertValueToArray(value);
	}

	/**
	 * Converts a single element using the type converter manager.
	 */
	protected T convertType(final Object value) {
		return typeConverterManager.convertType(value, targetComponentType);
	}

	/**
	 * Creates a new array of the target component type. The default
	 * implementation is reflection-based; override it for better performance.
	 */
	protected T[] createArray(final int length) {
		return (T[]) Array.newInstance(targetComponentType, length);
	}

	/**
	 * Wraps a single converted value into a one-element array.
	 */
	protected T[] convertToSingleElementArray(final Object value) {
		final T[] result = createArray(1);
		result[0] = convertType(value);
		return result;
	}

	/**
	 * Converts a non-array value to an array. Collections, iterables and
	 * character sequences are expanded; everything else ends up as a
	 * single-element array.
	 */
	protected T[] convertValueToArray(final Object value) {
		if (value instanceof Collection) {
			final Collection collection = (Collection) value;
			final T[] result = createArray(collection.size());

			int index = 0;
			for (final Object element : collection) {
				result[index++] = convertType(element);
			}
			return result;
		}

		if (value instanceof Iterable) {
			final Iterable iterable = (Iterable) value;

			// size is unknown up front, so buffer converted elements first
			final List<T> buffer = new ArrayList<>();
			for (final Object element : iterable) {
				buffer.add(convertType(element));
			}
			return buffer.toArray(createArray(buffer.size()));
		}

		if (value instanceof CharSequence) {
			// treat the text as CSV, then convert the resulting string array
			return convertArrayToArray(convertStringToArray(value.toString()));
		}

		return convertToSingleElementArray(value);
	}

	/**
	 * Splits a string for {@link #convertValueToArray(Object)}; by default
	 * the string is split as CSV using {@link CsvUtil}.
	 */
	protected String[] convertStringToArray(final String value) {
		return CsvUtil.toStringArray(value);
	}

	/**
	 * Converts an array value to the target array type. When the component
	 * types already match, the source array itself is returned (no copy).
	 */
	protected T[] convertArrayToArray(final Object value) {
		final Class componentType = value.getClass().getComponentType();

		if (componentType == targetComponentType) {
			// identical component types: no conversion required
			return (T[]) value;
		}

		if (componentType.isPrimitive()) {
			return convertPrimitiveArrayToArray(value, componentType);
		}

		final Object[] source = (Object[]) value;
		final T[] result = createArray(source.length);
		for (int i = 0; i < source.length; i++) {
			result[i] = convertType(source[i]);
		}
		return result;
	}

	/**
	 * Converts a primitive array to the target array. Each primitive
	 * component type has its own branch so elements are read without
	 * reflection and boxed exactly once per element.
	 */
	@SuppressWarnings("AutoBoxing")
	protected T[] convertPrimitiveArrayToArray(final Object value, final Class primitiveComponentType) {
		T[] result = null;

		if (primitiveComponentType == int.class) {
			final int[] source = (int[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == long.class) {
			final long[] source = (long[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == float.class) {
			final float[] source = (float[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == double.class) {
			final double[] source = (double[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == short.class) {
			final short[] source = (short[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == byte.class) {
			final byte[] source = (byte[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == char.class) {
			final char[] source = (char[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		else if (primitiveComponentType == boolean.class) {
			final boolean[] source = (boolean[]) value;
			result = createArray(source.length);
			for (int i = 0; i < source.length; i++) {
				result[i] = convertType(source[i]);
			}
		}
		return result;
	}
}
package GUI;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Collections;

import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JOptionPane;
import javax.swing.JTextArea;
import javax.swing.ListModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;

import ProgramControl.MainController;

/**
 * Window where the user composes the file-name format used when renaming MP3s:
 * fields in the left list ("Sort") are included, in order; fields in the right
 * list ("Ignore") are excluded. "Title" must always remain in the Sort list.
 *
 * NOTE(review): relies on static state in {@code MainController}
 * (CurrentDirectory, FileChooser, ReferenceFrame, FFSW, MainMenu, FNF) —
 * behavior of those members is assumed, not visible from this file.
 *
 * Review fixes applied:
 * - Go button label ended with a malformed {@code </htm>} tag; now {@code </html>}.
 * - Add/Remove/Move Up/Move Down previously threw (NPE or
 *   IndexOutOfBoundsException) when nothing was selected or when moving past
 *   a list edge; selections and bounds are now checked first.
 * - The copy-pasted list-rebuild code is extracted into {@link #refreshLists()}.
 */
@SuppressWarnings({"serial", "rawtypes", "unchecked"})
public class FileFormatSelectionWindow extends JFrame implements ActionListener {

	private JMenuBar MenuBar;
	private JMenu File, Edit, View, Help;
	private JTextArea CurrentDirectory;   // read-only display of the working directory
	private JLabel CurDir;
	private JButton Browse, Add, Remove, Up, Down, Return, Go;
	private JList Sort, Ignore;           // views; backed by SortItems/IgnoreItems
	private ArrayList<String> SortItems, IgnoreItems;

	/**
	 * Builds the window with absolute positioning (null layout) and wires
	 * all button listeners. Does not make the window visible.
	 */
	public FileFormatSelectionWindow() {
		this.setLayout(null);

		// Menu bar (entries are currently placeholders with no actions)
		this.MenuBar = new JMenuBar();
		this.File = new JMenu("File");
		this.Edit = new JMenu("Edit");
		this.View = new JMenu("View");
		this.Help = new JMenu("Help");

		// Current-directory display + chooser button
		this.CurrentDirectory = new JTextArea(MainController.CurrentDirectory.toString());
		this.CurrentDirectory.setEditable(false);
		this.Browse = new JButton("Change...");
		this.CurDir = new JLabel("Current Directory:");

		this.Add = new JButton("Add");
		this.Remove = new JButton("Remove");
		this.Up = new JButton("Move Up");
		this.Down = new JButton("Move Down");
		this.Return = new JButton("<html><center>Return To Main Menu</center></html>");
		// FIX: closing tag was "</htm>" (malformed HTML label)
		this.Go = new JButton("<html><center>Rename MP3s</center></html>");

		// Backing lists: "Title" always starts (and must stay) in the sort list
		this.IgnoreItems = new ArrayList<>();
		this.SortItems = new ArrayList<>();
		SortItems.add("Title");
		this.IgnoreItems.add("Album");
		this.IgnoreItems.add("Artist");
		this.IgnoreItems.add("Genre");
		this.IgnoreItems.add("Year");
		this.Sort = new JList(SortItems.toArray());
		this.Ignore = new JList(IgnoreItems.toArray());

		// Button listeners
		this.Browse.addActionListener(this);
		this.Add.addActionListener(this);
		this.Remove.addActionListener(this);
		this.Up.addActionListener(this);
		this.Down.addActionListener(this);
		this.Return.addActionListener(this);
		this.Go.addActionListener(this);

		// Selection listeners are currently no-ops (kept as extension points)
		this.Sort.addListSelectionListener(new ListSelectionListener() {
			@Override
			public void valueChanged(ListSelectionEvent event) {
				if (!event.getValueIsAdjusting()) {
					// intentionally empty
				}
			}
		});
		this.Ignore.addListSelectionListener(new ListSelectionListener() {
			@Override
			public void valueChanged(ListSelectionEvent event) {
				if (!event.getValueIsAdjusting()) {
					// intentionally empty
				}
			}
		});

		// Absolute positions (window is 665x400)
		this.CurrentDirectory.setBounds(15, 25, 520, 18);
		this.Browse.setBounds(550, 20, 90, 25);
		this.CurDir.setBounds(15, 5, 325, 15);
		this.Sort.setBounds(15, 65, 250, 225);
		this.Ignore.setBounds(385, 65, 250, 225);
		this.Add.setBounds(275, 65, 100, 40);
		this.Remove.setBounds(275, 115, 100, 40);
		this.Up.setBounds(275, 165, 100, 40);
		this.Down.setBounds(275, 215, 100, 40);
		this.Return.setBounds(425, 295, 100, 60);
		this.Go.setBounds(535, 295, 100, 60);

		this.MenuBar.add(this.File);
		this.MenuBar.add(this.Edit);
		this.MenuBar.add(this.View);
		this.MenuBar.add(this.Help);

		this.add(MenuBar);
		this.add(CurrentDirectory);
		this.add(Browse);
		this.add(CurDir);
		this.add(Sort);
		this.add(Ignore);
		this.add(Add);
		this.add(Remove);
		this.add(Up);
		this.add(Down);
		this.add(Return);
		this.add(Go);

		this.setTitle("File Format Selection");
		this.setSize(665, 400);
		this.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
		this.setLocationRelativeTo(null);
	}

	/**
	 * Returns the chosen format fields in their current top-to-bottom order.
	 */
	public ArrayList<String> getFormatOrder() {
		ArrayList<String> itemsInOrder = new ArrayList<String>();
		ListModel model = Sort.getModel();
		for (int i = 0; i < model.getSize(); i++) {
			itemsInOrder.add((String) model.getElementAt(i));
		}
		return itemsInOrder;
	}

	@Override
	public void actionPerformed(ActionEvent event) {
		if (event.getSource() == Browse) {
			// Let the user pick a new working directory; keep the old one on cancel
			int choice = MainController.FileChooser.showOpenDialog(MainController.ReferenceFrame);
			if (choice == JFileChooser.APPROVE_OPTION) {
				MainController.CurrentDirectory = MainController.FileChooser.getSelectedFile();
			}
			this.CurrentDirectory.setText(MainController.CurrentDirectory.toString());
		}
		else if (event.getSource() == Add) {
			// Move the selected field from Ignore to Sort.
			// FIX: previously threw when nothing was selected.
			int selected = Ignore.getSelectedIndex();
			if (selected != -1) {
				SortItems.add(IgnoreItems.remove(selected));
				refreshLists();
			}
		}
		else if (event.getSource() == Remove) {
			// Move the selected field from Sort to Ignore ("Title" may not leave).
			// FIX: previously NPE'd when nothing was selected.
			int selected = Sort.getSelectedIndex();
			if (selected == -1) {
				return;
			}
			if (SortItems.get(selected).equals("Title")) {
				JOptionPane.showMessageDialog(MainController.ReferenceFrame,
						"You must include the song title in the file name!",
						"Invalid Option Choice", JOptionPane.WARNING_MESSAGE);
			}
			else {
				IgnoreItems.add(SortItems.remove(selected));
				refreshLists();
			}
		}
		else if (event.getSource() == Up) {
			// FIX: guard against no selection and against moving the top item up
			int selected = Sort.getSelectedIndex();
			if (selected > 0) {
				Collections.swap(SortItems, selected, selected - 1);
				refreshLists();
			}
		}
		else if (event.getSource() == Down) {
			// FIX: guard against no selection and against moving the bottom item down
			int selected = Sort.getSelectedIndex();
			if (selected != -1 && selected < SortItems.size() - 1) {
				Collections.swap(SortItems, selected, selected + 1);
				refreshLists();
			}
		}
		else if (event.getSource() == Return) {
			MainController.MainMenu.setEnabled(true);
			MainController.FFSW.setVisible(false);
		}
		else if (event.getSource() == Go) {
			MainController.FNF.format();
			MainController.MainMenu.setEnabled(true);
			MainController.FFSW.setVisible(false);
		}
	}

	/**
	 * Rebuilds both on-screen JLists from the backing item lists, then toggles
	 * window visibility to force a repaint (the original refresh approach).
	 * Selection is intentionally not preserved, matching prior behavior.
	 */
	private void refreshLists() {
		this.remove(Sort);
		this.remove(Ignore);
		this.Sort = new JList(SortItems.toArray());
		this.Ignore = new JList(IgnoreItems.toArray());
		this.Sort.setBounds(15, 65, 250, 225);
		this.Ignore.setBounds(385, 65, 250, 225);
		this.add(Sort);
		this.add(Ignore);
		MainController.FFSW.setVisible(false);
		MainController.FFSW.setVisible(true);
	}
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package de.schenk.objectweb.asm.tree.analysis;

import java.util.List;

import de.schenk.objectweb.asm.Handle;
import de.schenk.objectweb.asm.Opcodes;
import de.schenk.objectweb.asm.Type;
import de.schenk.objectweb.asm.tree.AbstractInsnNode;
import de.schenk.objectweb.asm.tree.FieldInsnNode;
import de.schenk.objectweb.asm.tree.IntInsnNode;
import de.schenk.objectweb.asm.tree.InvokeDynamicInsnNode;
import de.schenk.objectweb.asm.tree.LdcInsnNode;
import de.schenk.objectweb.asm.tree.MethodInsnNode;
import de.schenk.objectweb.asm.tree.MultiANewArrayInsnNode;
import de.schenk.objectweb.asm.tree.TypeInsnNode;

/**
 * An {@link Interpreter} for {@link BasicValue} values.
 *
 * @author Eric Bruneton
 * @author Bing Ran
 */
public class BasicInterpreter extends Interpreter<BasicValue> implements
        Opcodes {

    public BasicInterpreter() {
        super(ASM5);
    }

    protected BasicInterpreter(final int api) {
        super(api);
    }

    /**
     * Maps a {@link Type} to one of the seven basic abstract values
     * (int-like, float, long, double, reference) or {@code null} for void.
     */
    @Override
    public BasicValue newValue(final Type type) {
        if (type == null) {
            return BasicValue.UNINITIALIZED_VALUE;
        }
        switch (type.getSort()) {
        case Type.VOID:
            return null;
        case Type.BOOLEAN:
        case Type.CHAR:
        case Type.BYTE:
        case Type.SHORT:
        case Type.INT:
            // all sub-int types are modeled as int on the operand stack
            return BasicValue.INT_VALUE;
        case Type.FLOAT:
            return BasicValue.FLOAT_VALUE;
        case Type.LONG:
            return BasicValue.LONG_VALUE;
        case Type.DOUBLE:
            return BasicValue.DOUBLE_VALUE;
        case Type.ARRAY:
        case Type.OBJECT:
            return BasicValue.REFERENCE_VALUE;
        default:
            throw new Error("Internal error");
        }
    }

    /**
     * Abstract value produced by an instruction that pushes without popping
     * (constants, JSR, GETSTATIC, NEW).
     */
    @Override
    public BasicValue newOperation(final AbstractInsnNode insn)
            throws AnalyzerException {
        switch (insn.getOpcode()) {
        case ACONST_NULL:
            return newValue(Type.getObjectType("null"));
        case ICONST_M1:
        case ICONST_0:
        case ICONST_1:
        case ICONST_2:
        case ICONST_3:
        case ICONST_4:
        case ICONST_5:
            return BasicValue.INT_VALUE;
        case LCONST_0:
        case LCONST_1:
            return BasicValue.LONG_VALUE;
        case FCONST_0:
        case FCONST_1:
        case FCONST_2:
            return BasicValue.FLOAT_VALUE;
        case DCONST_0:
        case DCONST_1:
            return BasicValue.DOUBLE_VALUE;
        case BIPUSH:
        case SIPUSH:
            return BasicValue.INT_VALUE;
        case LDC: {
            // the pushed type depends on the kind of constant
            final Object constant = ((LdcInsnNode) insn).cst;
            if (constant instanceof Integer) {
                return BasicValue.INT_VALUE;
            }
            if (constant instanceof Float) {
                return BasicValue.FLOAT_VALUE;
            }
            if (constant instanceof Long) {
                return BasicValue.LONG_VALUE;
            }
            if (constant instanceof Double) {
                return BasicValue.DOUBLE_VALUE;
            }
            if (constant instanceof String) {
                return newValue(Type.getObjectType("java/lang/String"));
            }
            if (constant instanceof Type) {
                final int sort = ((Type) constant).getSort();
                if (sort == Type.OBJECT || sort == Type.ARRAY) {
                    return newValue(Type.getObjectType("java/lang/Class"));
                }
                if (sort == Type.METHOD) {
                    return newValue(Type
                            .getObjectType("java/lang/invoke/MethodType"));
                }
                throw new IllegalArgumentException("Illegal LDC constant "
                        + constant);
            }
            if (constant instanceof Handle) {
                return newValue(Type
                        .getObjectType("java/lang/invoke/MethodHandle"));
            }
            throw new IllegalArgumentException("Illegal LDC constant "
                    + constant);
        }
        case JSR:
            return BasicValue.RETURNADDRESS_VALUE;
        case GETSTATIC:
            return newValue(Type.getType(((FieldInsnNode) insn).desc));
        case NEW:
            return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
        default:
            throw new Error("Internal error.");
        }
    }

    /**
     * Copy instructions (loads/stores/dups/swap) pass values through unchanged.
     */
    @Override
    public BasicValue copyOperation(final AbstractInsnNode insn,
            final BasicValue value) throws AnalyzerException {
        return value;
    }

    /**
     * Abstract result of an instruction consuming a single stack value;
     * {@code null} marks instructions that push nothing.
     */
    @Override
    public BasicValue unaryOperation(final AbstractInsnNode insn,
            final BasicValue value) throws AnalyzerException {
        switch (insn.getOpcode()) {
        case INEG:
        case IINC:
        case L2I:
        case F2I:
        case D2I:
        case I2B:
        case I2C:
        case I2S:
            return BasicValue.INT_VALUE;
        case FNEG:
        case I2F:
        case L2F:
        case D2F:
            return BasicValue.FLOAT_VALUE;
        case LNEG:
        case I2L:
        case F2L:
        case D2L:
            return BasicValue.LONG_VALUE;
        case DNEG:
        case I2D:
        case L2D:
        case F2D:
            return BasicValue.DOUBLE_VALUE;
        case IFEQ:
        case IFNE:
        case IFLT:
        case IFGE:
        case IFGT:
        case IFLE:
        case TABLESWITCH:
        case LOOKUPSWITCH:
        case IRETURN:
        case LRETURN:
        case FRETURN:
        case DRETURN:
        case ARETURN:
        case PUTSTATIC:
            return null;
        case GETFIELD:
            return newValue(Type.getType(((FieldInsnNode) insn).desc));
        case NEWARRAY:
            // operand selects the primitive component type
            switch (((IntInsnNode) insn).operand) {
            case T_BOOLEAN:
                return newValue(Type.getType("[Z"));
            case T_CHAR:
                return newValue(Type.getType("[C"));
            case T_BYTE:
                return newValue(Type.getType("[B"));
            case T_SHORT:
                return newValue(Type.getType("[S"));
            case T_INT:
                return newValue(Type.getType("[I"));
            case T_FLOAT:
                return newValue(Type.getType("[F"));
            case T_DOUBLE:
                return newValue(Type.getType("[D"));
            case T_LONG:
                return newValue(Type.getType("[J"));
            default:
                throw new AnalyzerException(insn, "Invalid array type");
            }
        case ANEWARRAY: {
            final String desc = ((TypeInsnNode) insn).desc;
            return newValue(Type.getType("[" + Type.getObjectType(desc)));
        }
        case ARRAYLENGTH:
            return BasicValue.INT_VALUE;
        case ATHROW:
            return null;
        case CHECKCAST: {
            final String desc = ((TypeInsnNode) insn).desc;
            return newValue(Type.getObjectType(desc));
        }
        case INSTANCEOF:
            return BasicValue.INT_VALUE;
        case MONITORENTER:
        case MONITOREXIT:
        case IFNULL:
        case IFNONNULL:
            return null;
        default:
            throw new Error("Internal error.");
        }
    }

    /**
     * Abstract result of an instruction consuming two stack values.
     */
    @Override
    public BasicValue binaryOperation(final AbstractInsnNode insn,
            final BasicValue value1, final BasicValue value2)
            throws AnalyzerException {
        switch (insn.getOpcode()) {
        case IALOAD:
        case BALOAD:
        case CALOAD:
        case SALOAD:
        case IADD:
        case ISUB:
        case IMUL:
        case IDIV:
        case IREM:
        case ISHL:
        case ISHR:
        case IUSHR:
        case IAND:
        case IOR:
        case IXOR:
            return BasicValue.INT_VALUE;
        case FALOAD:
        case FADD:
        case FSUB:
        case FMUL:
        case FDIV:
        case FREM:
            return BasicValue.FLOAT_VALUE;
        case LALOAD:
        case LADD:
        case LSUB:
        case LMUL:
        case LDIV:
        case LREM:
        case LSHL:
        case LSHR:
        case LUSHR:
        case LAND:
        case LOR:
        case LXOR:
            return BasicValue.LONG_VALUE;
        case DALOAD:
        case DADD:
        case DSUB:
        case DMUL:
        case DDIV:
        case DREM:
            return BasicValue.DOUBLE_VALUE;
        case AALOAD:
            return BasicValue.REFERENCE_VALUE;
        case LCMP:
        case FCMPL:
        case FCMPG:
        case DCMPL:
        case DCMPG:
            return BasicValue.INT_VALUE;
        case IF_ICMPEQ:
        case IF_ICMPNE:
        case IF_ICMPLT:
        case IF_ICMPGE:
        case IF_ICMPGT:
        case IF_ICMPLE:
        case IF_ACMPEQ:
        case IF_ACMPNE:
        case PUTFIELD:
            return null;
        default:
            throw new Error("Internal error.");
        }
    }

    /**
     * Only *ASTORE instructions are ternary; they push nothing.
     */
    @Override
    public BasicValue ternaryOperation(final AbstractInsnNode insn,
            final BasicValue value1, final BasicValue value2,
            final BasicValue value3) throws AnalyzerException {
        return null;
    }

    /**
     * Abstract result of an instruction with a variable number of operands
     * (method calls, invokedynamic, multianewarray).
     */
    @Override
    public BasicValue naryOperation(final AbstractInsnNode insn,
            final List<? extends BasicValue> values) throws AnalyzerException {
        switch (insn.getOpcode()) {
        case MULTIANEWARRAY:
            return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
        case INVOKEDYNAMIC:
            return newValue(Type
                    .getReturnType(((InvokeDynamicInsnNode) insn).desc));
        default:
            // all INVOKE* instructions
            return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
        }
    }

    /**
     * Return values need no extra checking in the basic interpreter.
     */
    @Override
    public void returnOperation(final AbstractInsnNode insn,
            final BasicValue value, final BasicValue expected)
            throws AnalyzerException {
    }

    /**
     * Merges two values at a control-flow join: equal values are kept,
     * differing values collapse to the uninitialized (unknown) value.
     */
    @Override
    public BasicValue merge(final BasicValue v, final BasicValue w) {
        return v.equals(w) ? v : BasicValue.UNINITIALIZED_VALUE;
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.core.Tuple; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.xpack.core.common.IteratingActionListener; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import 
org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup; import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; import org.elasticsearch.xpack.core.security.support.CacheIteratorHelper; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.AsyncSearchUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import 
java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.function.Predicate.not; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.xpack.core.security.authc.Authentication.VERSION_API_KEY_ROLES_AS_BYTES; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; /** * A composite roles store that combines built in roles, file-based roles, and index-based roles. Checks the built in roles first, then the * file roles, and finally the index roles. */ public class CompositeRolesStore { private static final String ROLES_STORE_SOURCE = "roles_stores"; private static final Setting<Integer> CACHE_SIZE_SETTING = Setting.intSetting("xpack.security.authz.store.roles.cache.max_size", 10000, Property.NodeScope); private static final Setting<Integer> NEGATIVE_LOOKUP_CACHE_SIZE_SETTING = Setting.intSetting("xpack.security.authz.store.roles.negative_lookup_cache.max_size", 10000, Property.NodeScope); private static final Logger logger = LogManager.getLogger(CompositeRolesStore.class); private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CompositeRolesStore.class); private final FileRolesStore fileRolesStore; private final NativeRolesStore nativeRolesStore; private final NativePrivilegeStore privilegeStore; private final XPackLicenseState licenseState; private final Consumer<Collection<RoleDescriptor>> effectiveRoleDescriptorsConsumer; private final FieldPermissionsCache fieldPermissionsCache; private final Cache<RoleKey, Role> roleCache; private final CacheIteratorHelper<RoleKey, Role> roleCacheHelper; private final Cache<String, Boolean> negativeLookupCache; private final 
DocumentSubsetBitsetCache dlsBitsetCache; // cleared wholesale on full role-cache invalidation, see invalidateAll()
    private final ThreadContext threadContext;
    // Monotonic invalidation counter: sampled before each async role build and compared again before
    // caching the result, so a result computed across an invalidation is never cached (see buildThenMaybeCacheRole).
    private final AtomicLong numInvalidation = new AtomicLong();
    private final AnonymousUser anonymousUser;
    private final ApiKeyService apiKeyService;
    private final ServiceAccountService serviceAccountService;
    private final boolean isAnonymousEnabled;
    // reserved + file + native stores, always consulted in that order
    private final List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> builtInRoleProviders;
    // builtInRoleProviders plus any custom providers; only used when the license allows custom role providers
    private final List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> allRoleProviders;
    private final Role superuserRole;
    private final Role xpackUserRole;
    private final Role asyncSearchUserRole;
    private final Automaton restrictedIndicesAutomaton;

    /**
     * Wires together the role sources and caches. Registers this store as an invalidation
     * listener on the file roles store, sizes the role and negative-lookup caches from
     * settings, and pre-builds the roles for the internal XPack / async-search users.
     */
    public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, NativeRolesStore nativeRolesStore,
                               ReservedRolesStore reservedRolesStore, NativePrivilegeStore privilegeStore,
                               List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> rolesProviders,
                               ThreadContext threadContext, XPackLicenseState licenseState,
                               FieldPermissionsCache fieldPermissionsCache, ApiKeyService apiKeyService,
                               ServiceAccountService serviceAccountService, DocumentSubsetBitsetCache dlsBitsetCache,
                               IndexNameExpressionResolver resolver,
                               Consumer<Collection<RoleDescriptor>> effectiveRoleDescriptorsConsumer) {
        this.fileRolesStore = Objects.requireNonNull(fileRolesStore);
        this.dlsBitsetCache = Objects.requireNonNull(dlsBitsetCache);
        // any change to a file-backed role must drop cached Role objects built from it
        fileRolesStore.addListener(this::invalidate);
        this.nativeRolesStore = Objects.requireNonNull(nativeRolesStore);
        this.privilegeStore = Objects.requireNonNull(privilegeStore);
        this.licenseState = Objects.requireNonNull(licenseState);
        this.fieldPermissionsCache = Objects.requireNonNull(fieldPermissionsCache);
        this.apiKeyService = Objects.requireNonNull(apiKeyService);
        this.serviceAccountService = Objects.requireNonNull(serviceAccountService);
        this.effectiveRoleDescriptorsConsumer = Objects.requireNonNull(effectiveRoleDescriptorsConsumer);
        CacheBuilder<RoleKey, Role> builder = CacheBuilder.builder();
        final int cacheSize = CACHE_SIZE_SETTING.get(settings);
        if (cacheSize >= 0) { // a negative setting means "unbounded"
            builder.setMaximumWeight(cacheSize);
        }
        this.roleCache = builder.build();
        this.roleCacheHelper = new CacheIteratorHelper<>(roleCache);
        this.threadContext = threadContext;
        CacheBuilder<String, Boolean> nlcBuilder = CacheBuilder.builder();
        final int nlcCacheSize = NEGATIVE_LOOKUP_CACHE_SIZE_SETTING.get(settings);
        if (nlcCacheSize >= 0) {
            nlcBuilder.setMaximumWeight(nlcCacheSize);
        }
        this.negativeLookupCache = nlcBuilder.build();
        this.builtInRoleProviders = List.of(reservedRolesStore, fileRolesStore, nativeRolesStore);
        if (rolesProviders.isEmpty()) {
            this.allRoleProviders = this.builtInRoleProviders;
        } else {
            List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> allList =
                new ArrayList<>(builtInRoleProviders.size() + rolesProviders.size());
            allList.addAll(builtInRoleProviders);
            allList.addAll(rolesProviders);
            this.allRoleProviders = Collections.unmodifiableList(allList);
        }
        this.anonymousUser = new AnonymousUser(settings);
        this.isAnonymousEnabled = AnonymousUser.isAnonymousEnabled(settings);
        this.restrictedIndicesAutomaton = resolver.getSystemNameAutomaton();
        // internal-user roles are fixed, so build them once up front
        this.superuserRole = Role.builder(
            ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR,
            fieldPermissionsCache,
            restrictedIndicesAutomaton
        ).build();
        xpackUserRole = Role.builder(XPackUser.ROLE_DESCRIPTOR, fieldPermissionsCache, restrictedIndicesAutomaton).build();
        asyncSearchUserRole = Role.builder(AsyncSearchUser.ROLE_DESCRIPTOR, fieldPermissionsCache, restrictedIndicesAutomaton).build();
    }

    /**
     * Resolves the {@link Role} for the given role names, serving from {@code roleCache} when possible.
     * On a miss, fetches the descriptors asynchronously; if the license does not allow DLS/FLS, any
     * descriptor using document- or field-level security is dropped from the effective set before the
     * role is built and (maybe) cached.
     */
    public void roles(Set<String> roleNames, ActionListener<Role> roleActionListener) {
        final RoleKey roleKey = new RoleKey(roleNames, ROLES_STORE_SOURCE);
        Role existing = roleCache.get(roleKey);
        if (existing != null) {
            roleActionListener.onResponse(existing);
        } else {
            // sample the invalidation counter BEFORE the async fetch so a concurrent invalidation
            // prevents caching a stale result later
            final long invalidationCounter = numInvalidation.get();
            roleDescriptors(roleNames, ActionListener.wrap(
                rolesRetrievalResult -> {
                    logDeprecatedRoles(rolesRetrievalResult.roleDescriptors);
                    final boolean missingRoles = rolesRetrievalResult.getMissingRoles().isEmpty() == false;
                    if (missingRoles) {
                        logger.debug(() -> new ParameterizedMessage("Could not find roles with names {}",
                            rolesRetrievalResult.getMissingRoles()));
                    }
                    final Set<RoleDescriptor> effectiveDescriptors;
                    Set<RoleDescriptor> roleDescriptors = rolesRetrievalResult.getRoleDescriptors();
                    if (roleDescriptors.stream().anyMatch(RoleDescriptor::isUsingDocumentOrFieldLevelSecurity)
                        && licenseState.checkFeature(Feature.SECURITY_DLS_FLS) == false) {
                        // license downgrade path: silently strip DLS/FLS-using descriptors
                        effectiveDescriptors = roleDescriptors.stream()
                            .filter(not(RoleDescriptor::isUsingDocumentOrFieldLevelSecurity))
                            .collect(Collectors.toSet());
                    } else {
                        effectiveDescriptors = roleDescriptors;
                    }
                    logger.trace(() -> new ParameterizedMessage("Exposing effective role descriptors [{}] for role names [{}]",
                        effectiveDescriptors, roleNames));
                    effectiveRoleDescriptorsConsumer.accept(Collections.unmodifiableCollection(effectiveDescriptors));
                    logger.trace(() -> new ParameterizedMessage("Building role from descriptors [{}] for role names [{}]",
                        effectiveDescriptors, roleNames));
                    buildThenMaybeCacheRole(roleKey, effectiveDescriptors, rolesRetrievalResult.getMissingRoles(),
                        rolesRetrievalResult.isSuccess(), invalidationCounter, roleActionListener);
                }, roleActionListener::onFailure));
        }
    }

    /** Emits a deprecation warning for every descriptor whose metadata flags it as deprecated. */
    void logDeprecatedRoles(Set<RoleDescriptor> roleDescriptors) {
        roleDescriptors.stream()
            .filter(rd -> Boolean.TRUE.equals(rd.getMetadata().get(MetadataUtils.DEPRECATED_METADATA_KEY)))
            .forEach(rd -> {
                String reason = Objects.toString(
                    rd.getMetadata().get(MetadataUtils.DEPRECATED_REASON_METADATA_KEY),
                    "Please check the documentation");
                deprecationLogger.deprecate(DeprecationCategory.SECURITY, "deprecated_role-" + rd.getName(),
                    "The role [" + rd.getName() + "] is deprecated and will be removed in a future version of Elasticsearch. " + reason);
            });
    }

    // for testing
    Role getXpackUserRole() {
        return xpackUserRole;
    }

    // for testing
    Role getAsyncSearchUserRole() {
        return asyncSearchUserRole;
    }

    /**
     * Resolves the role for a (possibly internal) user. Internal users get their prebuilt roles;
     * service accounts and API keys are delegated to their dedicated paths; everyone else goes
     * through {@link #roles(Set, ActionListener)} with anonymous roles mixed in when enabled.
     */
    public void getRoles(User user, Authentication authentication, ActionListener<Role> roleActionListener) {
        // we need to special case the internal users in this method, if we apply the anonymous roles to every user including these system
        // user accounts then we run into the chance of a deadlock because then we need to get a role that we may be trying to get as the
        // internal user. The SystemUser is special cased as it has special privileges to execute internal actions and should never be
        // passed into this method. The XPackUser has the Superuser role and we can simply return that
        if (SystemUser.is(user)) {
            throw new IllegalArgumentException("the user [" + user.principal() + "] is the system user and we should never try to get its"
                + " roles");
        }
        if (XPackUser.is(user)) {
            assert XPackUser.INSTANCE.roles().length == 1;
            roleActionListener.onResponse(xpackUserRole);
            return;
        }
        if (XPackSecurityUser.is(user)) {
            roleActionListener.onResponse(superuserRole);
            return;
        }
        if (AsyncSearchUser.is(user)) {
            roleActionListener.onResponse(asyncSearchUserRole);
            return;
        }
        if (authentication.isServiceAccount()) {
            getRolesForServiceAccount(authentication, roleActionListener);
        } else if (ApiKeyService.isApiKeyAuthentication(authentication)) {
            getRolesForApiKey(authentication, roleActionListener);
        } else {
            Set<String> roleNames = new HashSet<>(Arrays.asList(user.roles()));
            if (isAnonymousEnabled && anonymousUser.equals(user) == false) {
                if (anonymousUser.roles().length == 0) {
                    throw new IllegalStateException("anonymous is only enabled when the anonymous user has roles");
                }
                Collections.addAll(roleNames, anonymousUser.roles());
            }
            if (roleNames.isEmpty()) {
                roleActionListener.onResponse(Role.EMPTY);
            } else if (roleNames.contains(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName())) {
                roleActionListener.onResponse(superuserRole);
            } else {
                roles(roleNames, roleActionListener);
            }
        }
    }

    /** Builds (or serves from cache, source "service_account") the role for a service-account authentication. */
    private void getRolesForServiceAccount(Authentication authentication, ActionListener<Role> roleActionListener) {
        serviceAccountService.getRoleDescriptor(authentication, ActionListener.wrap(roleDescriptor -> {
            final RoleKey roleKey = new RoleKey(Set.of(roleDescriptor.getName()), "service_account");
            final Role existing = roleCache.get(roleKey);
            if (existing == null) {
                final long invalidationCounter = numInvalidation.get();
                buildThenMaybeCacheRole(roleKey, List.of(roleDescriptor), Set.of(), true, invalidationCounter, roleActionListener);
            } else {
                roleActionListener.onResponse(existing);
            }
        }, roleActionListener::onFailure));
    }

    /**
     * Resolves the role for an API-key authentication. Newer authentications carry role bytes and
     * combine the assigned role with the limited-by role via {@link LimitedRole}; older ones fall
     * back to fetching descriptors through {@code apiKeyService}.
     */
    private void getRolesForApiKey(Authentication authentication, ActionListener<Role> roleActionListener) {
        if (authentication.getVersion().onOrAfter(VERSION_API_KEY_ROLES_AS_BYTES)) {
            buildAndCacheRoleForApiKey(authentication, false, ActionListener.wrap(
                role -> {
                    if (role == Role.EMPTY) {
                        // no assigned role descriptors: the limited-by role alone applies
                        buildAndCacheRoleForApiKey(authentication, true, roleActionListener);
                    } else {
                        buildAndCacheRoleForApiKey(authentication, true, ActionListener.wrap(
                            limitedByRole -> roleActionListener.onResponse(
                                LimitedRole.createLimitedRole(role, limitedByRole)),
                            roleActionListener::onFailure
                        ));
                    }
                },
                roleActionListener::onFailure
            ));
        } else {
            apiKeyService.getRoleForApiKey(authentication, ActionListener.wrap(apiKeyRoleDescriptors -> {
                final List<RoleDescriptor> descriptors = apiKeyRoleDescriptors.getRoleDescriptors();
                if (descriptors == null) {
                    roleActionListener.onFailure(new IllegalStateException("missing role descriptors"));
                } else if (apiKeyRoleDescriptors.getLimitedByRoleDescriptors() == null) {
                    buildAndCacheRoleFromDescriptors(descriptors, apiKeyRoleDescriptors.getApiKeyId() + "_role_desc", roleActionListener);
                } else {
                    buildAndCacheRoleFromDescriptors(descriptors, apiKeyRoleDescriptors.getApiKeyId() + "_role_desc",
                        ActionListener.wrap(role ->
                            buildAndCacheRoleFromDescriptors(apiKeyRoleDescriptors.getLimitedByRoleDescriptors(),
                                apiKeyRoleDescriptors.getApiKeyId() + "_limited_role_desc", ActionListener.wrap(
                                    limitedBy -> roleActionListener.onResponse(LimitedRole.createLimitedRole(role, limitedBy)),
                                    roleActionListener::onFailure)),
                            roleActionListener::onFailure));
                }
            }, roleActionListener::onFailure));
        }
    }

    /**
     * Builds a role from explicit descriptors under an external cache {@code source}
     * (the internal {@code ROLES_STORE_SOURCE} is rejected to keep the namespaces disjoint).
     */
    public void buildAndCacheRoleFromDescriptors(Collection<RoleDescriptor> roleDescriptors, String source,
                                                 ActionListener<Role> listener) {
        if (ROLES_STORE_SOURCE.equals(source)) {
            throw new IllegalArgumentException("source [" + ROLES_STORE_SOURCE + "] is reserved for internal use");
        }
        RoleKey roleKey = new RoleKey(roleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toSet()), source);
        Role existing = roleCache.get(roleKey);
        if (existing != null) {
            listener.onResponse(existing);
        } else {
            final long invalidationCounter = numInvalidation.get();
            buildThenMaybeCacheRole(roleKey, roleDescriptors, Collections.emptySet(), true, invalidationCounter, listener);
        }
    }

    /**
     * Builds the role and caches it only when {@code tryCache} is set AND no invalidation happened
     * since {@code invalidationCounter} was sampled; unresolved names go into the negative-lookup cache.
     */
    private void buildThenMaybeCacheRole(RoleKey roleKey, Collection<RoleDescriptor> roleDescriptors, Set<String> missing,
                                         boolean tryCache, long invalidationCounter, ActionListener<Role> listener) {
        logger.trace("Building role from descriptors [{}] for names [{}] from source [{}]",
            roleDescriptors, roleKey.names, roleKey.source);
        buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, privilegeStore, restrictedIndicesAutomaton,
            ActionListener.wrap(role -> {
                if (role != null && tryCache) {
                    try (ReleasableLock ignored = roleCacheHelper.acquireUpdateLock()) {
                        /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold
                         * the write lock (fetching stats for instance - which is kinda overkill?) but since we fetching
                         * stuff in an async fashion we need to make sure that if the cache got invalidated since we
                         * started the request we don't put a potential stale result in the cache, hence the
                         * numInvalidation.get() comparison to the number of invalidation when we started. we just try to
                         * be on the safe side and don't cache potentially stale results */
                        if (invalidationCounter == numInvalidation.get()) {
                            roleCache.computeIfAbsent(roleKey, (s) -> role);
                        }
                    }
                    for (String missingRole : missing) {
                        negativeLookupCache.computeIfAbsent(missingRole, s -> Boolean.TRUE);
                    }
                }
                listener.onResponse(role);
            }, listener::onFailure)
        );
    }

    /**
     * Builds (or serves from cache) either the assigned or the limited-by role of an API key;
     * the cache key is a SHA-256 over the raw role bytes so identical definitions share an entry.
     */
    private void buildAndCacheRoleForApiKey(Authentication authentication, boolean limitedBy, ActionListener<Role> roleActionListener) {
        final Tuple<String, BytesReference> apiKeyIdAndBytes = apiKeyService.getApiKeyIdAndRoleBytes(authentication, limitedBy);
        final String roleDescriptorsHash = MessageDigests.toHexString(
            MessageDigests.digest(apiKeyIdAndBytes.v2(), MessageDigests.sha256()));
        final RoleKey roleKey = new RoleKey(Set.of("apikey:" + roleDescriptorsHash),
            limitedBy ? "apikey_limited_role" : "apikey_role");
        final Role existing = roleCache.get(roleKey);
        if (existing == null) {
            final long invalidationCounter = numInvalidation.get();
            final List<RoleDescriptor> roleDescriptors =
                apiKeyService.parseRoleDescriptors(apiKeyIdAndBytes.v1(), apiKeyIdAndBytes.v2());
            buildThenMaybeCacheRole(roleKey, roleDescriptors, Collections.emptySet(), true, invalidationCounter, roleActionListener);
        } else {
            roleActionListener.onResponse(existing);
        }
    }

    /** Fetches the descriptors for the given names; fails the listener if any provider failed. */
    public void getRoleDescriptors(Set<String> roleNames, ActionListener<Set<RoleDescriptor>> listener) {
        roleDescriptors(roleNames, ActionListener.wrap(rolesRetrievalResult -> {
            if (rolesRetrievalResult.isSuccess()) {
                listener.onResponse(rolesRetrievalResult.getRoleDescriptors());
            } else {
                listener.onFailure(new ElasticsearchException("role retrieval had one or more failures"));
            }
        }, listener::onFailure));
    }

    /** Drops names already known to be missing (negative-lookup cache) before hitting the providers. */
    private void roleDescriptors(Set<String> roleNames, ActionListener<RolesRetrievalResult> rolesResultListener) {
        final Set<String> filteredRoleNames = roleNames.stream().filter((s) -> {
            if (negativeLookupCache.get(s) != null) {
                logger.debug(() -> new ParameterizedMessage("Requested role [{}] does not exist (cached)", s));
                return false;
            } else {
                return true;
            }
        }).collect(Collectors.toSet());
        loadRoleDescriptorsAsync(filteredRoleNames, rolesResultListener);
    }

    /**
     * Iterates the providers in order, removing each resolved name from {@code roleNames} as it goes;
     * whatever is left when the iteration stops is reported as missing. Custom providers are only
     * consulted when the license permits them.
     */
    private void loadRoleDescriptorsAsync(Set<String> roleNames, ActionListener<RolesRetrievalResult> listener) {
        final RolesRetrievalResult rolesResult = new RolesRetrievalResult();
        final List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> asyncRoleProviders =
            licenseState.checkFeature(Feature.SECURITY_CUSTOM_ROLE_PROVIDERS) ? allRoleProviders : builtInRoleProviders;
        final ActionListener<RoleRetrievalResult> descriptorsListener =
            ContextPreservingActionListener.wrapPreservingContext(ActionListener.wrap(ignore -> {
                rolesResult.setMissingRoles(roleNames);
                listener.onResponse(rolesResult);
            }, listener::onFailure), threadContext);
        // stop iterating providers as soon as every requested name has been resolved
        final Predicate<RoleRetrievalResult> iterationPredicate = result -> roleNames.isEmpty() == false;
        new IteratingActionListener<>(descriptorsListener, (rolesProvider, providerListener) -> {
            // try to resolve descriptors with role provider
            rolesProvider.accept(roleNames, ActionListener.wrap(result -> {
                if (result.isSuccess()) {
                    logger.debug(() -> new ParameterizedMessage("Roles [{}] were resolved by [{}]",
                        names(result.getDescriptors()), rolesProvider));
                    final Set<RoleDescriptor> resolvedDescriptors = result.getDescriptors();
                    rolesResult.addDescriptors(resolvedDescriptors);
                    // remove resolved descriptors from the set of roles still needed to be resolved
                    for (RoleDescriptor descriptor : resolvedDescriptors) {
                        roleNames.remove(descriptor.getName());
                    }
                } else {
                    logger.warn(new ParameterizedMessage("role retrieval failed from [{}]", rolesProvider), result.getFailure());
                    rolesResult.setFailure();
                }
                providerListener.onResponse(result);
            }, providerListener::onFailure));
        }, asyncRoleProviders, threadContext, Function.identity(), iterationPredicate).run();
    }

    /** Comma-joined descriptor names, for log messages. */
    private String names(Collection<RoleDescriptor> descriptors) {
        return descriptors.stream().map(RoleDescriptor::getName).collect(Collectors.joining(","));
    }

    /**
     * Merges a set of descriptors into a single {@link Role}: union of cluster privileges and run-as,
     * index privileges collated per index-name-set (restricted and unrestricted kept separate), and
     * application privileges resolved asynchronously through {@code privilegeStore} when present.
     */
    public static void buildRoleFromDescriptors(Collection<RoleDescriptor> roleDescriptors, FieldPermissionsCache fieldPermissionsCache,
                                                NativePrivilegeStore privilegeStore, Automaton restrictedIndicesAutomaton,
                                                ActionListener<Role> listener) {
        if (roleDescriptors.isEmpty()) {
            listener.onResponse(Role.EMPTY);
            return;
        }
        Set<String> clusterPrivileges = new HashSet<>();
        final List<ConfigurableClusterPrivilege> configurableClusterPrivileges = new ArrayList<>();
        Set<String> runAs = new HashSet<>();
        final Map<Set<String>, MergeableIndicesPrivilege> restrictedIndicesPrivilegesMap = new HashMap<>();
        final Map<Set<String>, MergeableIndicesPrivilege> indicesPrivilegesMap = new HashMap<>();
        // Keyed by application + resource
        Map<Tuple<String, Set<String>>, Set<String>> applicationPrivilegesMap = new HashMap<>();
        List<String> roleNames = new ArrayList<>(roleDescriptors.size());
        for (RoleDescriptor descriptor : roleDescriptors) {
            roleNames.add(descriptor.getName());
            if (descriptor.getClusterPrivileges() != null) {
                clusterPrivileges.addAll(Arrays.asList(descriptor.getClusterPrivileges()));
            }
            if (descriptor.getConditionalClusterPrivileges() != null) {
                configurableClusterPrivileges.addAll(Arrays.asList(descriptor.getConditionalClusterPrivileges()));
            }
            if (descriptor.getRunAs() != null) {
                runAs.addAll(Arrays.asList(descriptor.getRunAs()));
            }
            MergeableIndicesPrivilege.collatePrivilegesByIndices(descriptor.getIndicesPrivileges(), true,
                restrictedIndicesPrivilegesMap);
            MergeableIndicesPrivilege.collatePrivilegesByIndices(descriptor.getIndicesPrivileges(), false,
                indicesPrivilegesMap);
            for (RoleDescriptor.ApplicationResourcePrivileges appPrivilege : descriptor.getApplicationPrivileges()) {
                Tuple<String, Set<String>> key = new Tuple<>(appPrivilege.getApplication(), newHashSet(appPrivilege.getResources()));
                applicationPrivilegesMap.compute(key, (k, v) -> {
                    if (v == null) {
                        return newHashSet(appPrivilege.getPrivileges());
                    } else {
                        v.addAll(Arrays.asList(appPrivilege.getPrivileges()));
                        return v;
                    }
                });
            }
        }
        final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE
            : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY));
        final Role.Builder builder = Role.builder(restrictedIndicesAutomaton, roleNames.toArray(Strings.EMPTY_ARRAY))
            .cluster(clusterPrivileges, configurableClusterPrivileges)
            .runAs(runAsPrivilege);
        indicesPrivilegesMap.forEach((key, privilege) -> builder.add(
            fieldPermissionsCache.getFieldPermissions(privilege.fieldPermissionsDefinition),
            privilege.query,
            IndexPrivilege.get(privilege.privileges),
            false,
            privilege.indices.toArray(Strings.EMPTY_ARRAY)
        ));
        restrictedIndicesPrivilegesMap.forEach((key, privilege) -> builder.add(
            fieldPermissionsCache.getFieldPermissions(privilege.fieldPermissionsDefinition),
            privilege.query,
            IndexPrivilege.get(privilege.privileges),
            true,
            privilege.indices.toArray(Strings.EMPTY_ARRAY)
        ));
        if (applicationPrivilegesMap.isEmpty()) {
            listener.onResponse(builder.build());
        } else {
            final Set<String> applicationNames = applicationPrivilegesMap.keySet().stream()
                .map(Tuple::v1)
                .collect(Collectors.toSet());
            final Set<String> applicationPrivilegeNames = applicationPrivilegesMap.values().stream()
                .flatMap(Collection::stream)
                .collect(Collectors.toSet());
            privilegeStore.getPrivileges(applicationNames, applicationPrivilegeNames, ActionListener.wrap(appPrivileges -> {
                applicationPrivilegesMap.forEach((key, names) -> ApplicationPrivilege.get(key.v1(), names, appPrivileges)
                    .forEach(priv -> builder.addApplicationPrivilege(priv, key.v2())));
                listener.onResponse(builder.build());
            }, listener::onFailure));
        }
    }

    /** Drops every cached role, the negative-lookup cache and the DLS bitset cache. */
    public void invalidateAll() {
        numInvalidation.incrementAndGet();
        negativeLookupCache.invalidateAll();
        try (ReleasableLock ignored = roleCacheHelper.acquireUpdateLock()) {
            roleCache.invalidateAll();
        }
        dlsBitsetCache.clear("role store invalidation");
    }

    /** Drops every cached role whose key involves {@code role}, plus its negative-lookup entry. */
    public void invalidate(String role) {
        numInvalidation.incrementAndGet();
        roleCacheHelper.removeKeysIf(key -> key.names.contains(role));
        negativeLookupCache.invalidate(role);
    }

    /** Bulk variant of {@link #invalidate(String)}. */
    public void invalidate(Set<String> roles) {
        numInvalidation.incrementAndGet();
        roleCacheHelper.removeKeysIf(key -> Sets.haveEmptyIntersection(key.names, roles) == false);
        roles.forEach(negativeLookupCache::invalidate);
    }

    /** Collects usage stats from the file store, DLS bitset cache and (async) the native store. */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        final Map<String, Object> usage = new HashMap<>(2);
        usage.put("file", fileRolesStore.usageStats());
        usage.put("dls", Map.of("bit_set_cache", dlsBitsetCache.usageStats()));
        nativeRolesStore.usageStats(ActionListener.wrap(map -> {
            usage.put("native", map);
            listener.onResponse(usage);
        }, listener::onFailure));
    }

    /** Invalidates all caches when the security index recovers, is deleted, recreated or upgraded. */
    public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) {
        if (isMoveFromRedToNonRed(previousState, currentState) || isIndexDeleted(previousState, currentState)
            || Objects.equals(previousState.indexUUID, currentState.indexUUID) == false
            || previousState.isIndexUpToDate != currentState.isIndexUpToDate) {
            invalidateAll();
        }
    }

    // pkg - private for testing
    boolean isValueInNegativeLookupCache(String key) {
        return negativeLookupCache.get(key) != null;
    }

    /**
     * A mutable class that can be used to represent the combination of one or more {@link IndicesPrivileges}
     */
    private static class MergeableIndicesPrivilege {
        private final Set<String> indices;
        private final Set<String> privileges;
        private FieldPermissionsDefinition fieldPermissionsDefinition;
        // null means "no query restriction"; merging with an unrestricted entry clears the restriction
        private Set<BytesReference> query = null;

        MergeableIndicesPrivilege(String[] indices, String[] privileges, @Nullable String[] grantedFields,
                                  @Nullable String[] deniedFields, @Nullable BytesReference query) {
            this.indices = newHashSet(Objects.requireNonNull(indices));
            this.privileges = newHashSet(Objects.requireNonNull(privileges));
            this.fieldPermissionsDefinition = new FieldPermissionsDefinition(grantedFields, deniedFields);
            if (query != null) {
                this.query = newHashSet(query);
            }
        }

        /** Merges another privilege over the same index-name set into this one (field groups, privileges, queries). */
        void merge(MergeableIndicesPrivilege other) {
            assert indices.equals(other.indices) : "index names must be equivalent in order to merge";
            Set<FieldGrantExcludeGroup> groups = new HashSet<>();
            groups.addAll(this.fieldPermissionsDefinition.getFieldGrantExcludeGroups());
            groups.addAll(other.fieldPermissionsDefinition.getFieldGrantExcludeGroups());
            this.fieldPermissionsDefinition = new FieldPermissionsDefinition(groups);
            this.privileges.addAll(other.privileges);
            if (this.query == null || other.query == null) {
                // either side unrestricted -> merged result unrestricted
                this.query = null;
            } else {
                this.query.addAll(other.query);
            }
        }

        /** Groups privileges by index-name set, keeping only entries matching {@code allowsRestrictedIndices}. */
        private static void collatePrivilegesByIndices(IndicesPrivileges[] indicesPrivileges, boolean allowsRestrictedIndices,
                                                       Map<Set<String>, MergeableIndicesPrivilege> indicesPrivilegesMap) {
            for (final IndicesPrivileges indicesPrivilege : indicesPrivileges) {
                // if a index privilege is an explicit denial, then we treat it as non-existent since we skipped these in the past when
                // merging
                final boolean isExplicitDenial = indicesPrivileges.length == 1
                    && "none".equalsIgnoreCase(indicesPrivilege.getPrivileges()[0]);
                if (isExplicitDenial || (indicesPrivilege.allowRestrictedIndices() != allowsRestrictedIndices)) {
                    continue;
                }
                final Set<String> key = newHashSet(indicesPrivilege.getIndices());
                indicesPrivilegesMap.compute(key, (k, value) -> {
                    if (value == null) {
                        return new MergeableIndicesPrivilege(indicesPrivilege.getIndices(), indicesPrivilege.getPrivileges(),
                            indicesPrivilege.getGrantedFields(), indicesPrivilege.getDeniedFields(), indicesPrivilege.getQuery());
                    } else {
                        value.merge(new MergeableIndicesPrivilege(indicesPrivilege.getIndices(), indicesPrivilege.getPrivileges(),
                            indicesPrivilege.getGrantedFields(), indicesPrivilege.getDeniedFields(), indicesPrivilege.getQuery()));
                        return value;
                    }
                });
            }
        }
    }

    /** Mutable accumulator for one multi-provider descriptor lookup: resolved descriptors, missing names, success flag. */
    private static final class RolesRetrievalResult {
        private final Set<RoleDescriptor> roleDescriptors = new HashSet<>();
        private Set<String> missingRoles = Collections.emptySet();
        private boolean success = true;

        private void addDescriptors(Set<RoleDescriptor> descriptors) {
            roleDescriptors.addAll(descriptors);
        }

        private Set<RoleDescriptor> getRoleDescriptors() {
            return roleDescriptors;
        }

        private void setFailure() {
            success = false;
        }

        private boolean isSuccess() {
            return success;
        }

        private void setMissingRoles(Set<String> missingRoles) {
            this.missingRoles = missingRoles;
        }

        private Set<String> getMissingRoles() {
            return missingRoles;
        }
    }

    /** Role-cache key: the set of role names plus the source namespace they were resolved from. */
    private static final class RoleKey {
        private final Set<String> names;
        private final String source;

        private RoleKey(Set<String> names, String source) {
            this.names = Objects.requireNonNull(names);
            this.source = Objects.requireNonNull(source);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            RoleKey roleKey = (RoleKey) o;
            return names.equals(roleKey.names) && source.equals(roleKey.source);
        }

        @Override
        public int hashCode() {
            return Objects.hash(names, source);
        }
    }

    public static List<Setting<?>> getSettings() {
        return Arrays.asList(CACHE_SIZE_SETTING, NEGATIVE_LOOKUP_CACHE_SIZE_SETTING);
    }
}
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.gameplaykit;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

// NOTE(review): @Generated NatJ/MOE binding for GameplayKit's GKState — presumably emitted by the
// MOE code generator; prefer regenerating over hand-editing (confirm with the project's codegen setup).
/**
 * Represents a single state in a state machine.
 * By default, states allow transitions freely to and from the states in the machine.
 * <p>
 * If a more restricted set of valid transitions are needed in the state machine, you may override isValidNextState: where applicable.
 *
 * @see GKStateMachine
 * @see isValidNextState:
 */
@Generated
@Library("GameplayKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class GKState extends NSObject {
    static {
        // registers the native bindings for this class with the NatJ runtime
        NatJ.register();
    }

    @Generated
    protected GKState(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native GKState alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native GKState allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native GKState new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * Creates a new state to be used in a state machine.
     *
     * @see GKStateMachine
     */
    @Generated
    @Selector("state")
    public static native GKState state();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * Called by GKStateMachine when this state is entered.
     *
     * @param previousState the state that was exited to enter this state. This is nil if this is the state machine's first entered state.
     * @see stateMachineWithStates:initialStateClass:
     */
    @Generated
    @Selector("didEnterWithPreviousState:")
    public native void didEnterWithPreviousState(GKState previousState);

    @Generated
    @Selector("init")
    public native GKState init();

    /**
     * Returns YES if the given class is a valid next state to enter.
     * <p>
     * By default GKState will return YES for any class that is subclass of GKState.
     * Override this in a subclass to enforce limited edge traversals in the state machine.
     *
     * @param stateClass the class to be checked
     * @return YES if the class is kind of GKState and the state transition is valid, else NO.
     * @see GKStateMachine.canEnterState:
     * @see GKStateMachine.enterState:
     */
    @Generated
    @Selector("isValidNextState:")
    public native boolean isValidNextState(Class stateClass);

    /**
     * The state machine that this state is associated with.
     * This is nil if this state hasn't been added to a state machine yet.
     */
    @Generated
    @Selector("stateMachine")
    public native GKStateMachine stateMachine();

    /**
     * Called by GKStateMachine when it is updated
     *
     * @param seconds the time in seconds since the last update
     */
    @Generated
    @Selector("updateWithDeltaTime:")
    public native void updateWithDeltaTime(double seconds);

    /**
     * Called by GKStateMachine when this state is exited
     *
     * @param nextState the state that is being entered next
     */
    @Generated
    @Selector("willExitWithNextState:")
    public native void willExitWithNextState(GKState nextState);
}
/*
 * Code adapted from Greenrobot Essentials Murmur3F.java (https://git.io/fAG0Z)
 *
 * Copyright (C) 2014-2016 Markus Junginger, greenrobot (http://greenrobot.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ms123.common.system.groovydir.fswatcher.hashing;

import java.math.BigInteger;
import java.util.zip.Checksum;

/**
 * Murmur3F (MurmurHash3_x64_128): a streaming implementation of the 128-bit x64 variant of
 * MurmurHash3, exposed through the {@link Checksum} interface. Input is consumed in 16-byte
 * little-endian blocks (8 bytes each into k1/k2); bytes that do not fill a whole block are
 * buffered in {@code partialK1}/{@code partialK2} until the block completes or the hash is
 * finalized. Not thread-safe.
 */
public class Murmur3F implements Checksum {
    /** MurmurHash3 x64_128 mixing constants (from the reference implementation). */
    private static final long C1 = 0x87c37b91114253d5L;
    private static final long C2 = 0x4cf5ad432745937fL;

    private final long seed;
    private long h1;
    private long h2;
    private int length;       // total number of bytes consumed so far (feeds finalization)
    private int partialPos;   // 0..15: number of bytes buffered toward the current 16-byte block
    private long partialK1;   // little-endian buffer for block bytes 0-7
    private long partialK2;   // little-endian buffer for block bytes 8-15
    private boolean finished; // true once finishedH1/finishedH2 reflect the current input
    private long finishedH1;
    private long finishedH2;

    /** Creates a hasher with seed 0 (h1/h2 start at their default of 0, which equals the seed). */
    public Murmur3F() {
        seed = 0;
    }

    /**
     * Creates a hasher with the given seed.
     *
     * @param seed treated as an unsigned 32-bit value, matching the reference implementation
     */
    public Murmur3F(int seed) {
        this.seed = seed & 0xffffffffL; // unsigned 32 bit -> long
        h1 = h2 = this.seed;
    }

    /**
     * Consumes one byte (the low 8 bits of {@code b}), buffering it into the current 16-byte
     * block and applying the block once full.
     */
    @Override
    public void update(int b) {
        finished = false;
        // Place the byte at its little-endian position: positions 0-7 go into k1, 8-15 into k2.
        final long v = (0xffL & b) << ((partialPos & 7) << 3);
        if (partialPos < 8) {
            partialK1 |= v;
        } else {
            partialK2 |= v;
        }
        partialPos++;
        if (partialPos == 16) {
            applyKs(partialK1, partialK2);
            // Clear the buffers so the next block can OR bytes in starting from zero.
            partialK1 = 0;
            partialK2 = 0;
            partialPos = 0;
        }
        length++;
    }

    /** Consumes the whole array; equivalent to {@code update(b, 0, b.length)}. */
    public void update(byte[] b) {
        update(b, 0, b.length);
    }

    /**
     * Consumes {@code len} bytes starting at {@code off}. Any pending partial block is completed
     * byte-by-byte first, then whole 16-byte blocks are read directly, then the remainder is
     * buffered byte-by-byte.
     */
    @Override
    public void update(byte[] b, int off, int len) {
        finished = false;
        while (partialPos != 0 && len > 0) {
            update(b[off]);
            off++;
            len--;
        }
        int remainder = len & 0xF;
        int stop = off + len - remainder;
        for (int i = off; i < stop; i += 16) {
            long k1 = getLongLE(b, i);
            long k2 = getLongLE(b, i + 8);
            applyKs(k1, k2);
        }
        length += stop - off;
        for (int i = 0; i < remainder; i++) {
            update(b[stop + i]);
        }
    }

    /** Mixes one full 16-byte block (k1, k2) into the running state (reference body step). */
    private void applyKs(long k1, long k2) {
        k1 *= C1;
        k1 = Long.rotateLeft(k1, 31);
        k1 *= C2;
        h1 ^= k1;
        h1 = Long.rotateLeft(h1, 27);
        h1 += h2;
        h1 = h1 * 5 + 0x52dce729;
        k2 *= C2;
        k2 = Long.rotateLeft(k2, 33);
        k2 *= C1;
        h2 ^= k2;
        h2 = Long.rotateLeft(h2, 31);
        h2 += h1;
        h2 = h2 * 5 + 0x38495ab5;
    }

    /**
     * Finalizes the hash into finishedH1/finishedH2 without disturbing the running state,
     * so more input may still be appended afterwards. Applies the tail (buffered partial
     * block) and the fmix64 finalization from the reference implementation.
     */
    private void checkFinished() {
        if (!finished) {
            finished = true;
            finishedH1 = h1;
            finishedH2 = h2;
            if (partialPos > 0) {
                if (partialPos > 8) {
                    long k2 = partialK2 * C2;
                    k2 = Long.rotateLeft(k2, 33);
                    k2 *= C1;
                    finishedH2 ^= k2;
                }
                long k1 = partialK1 * C1;
                k1 = Long.rotateLeft(k1, 31);
                k1 *= C2;
                finishedH1 ^= k1;
            }
            finishedH1 ^= length;
            finishedH2 ^= length;
            finishedH1 += finishedH2;
            finishedH2 += finishedH1;
            finishedH1 = fmix64(finishedH1);
            finishedH2 = fmix64(finishedH2);
            finishedH1 += finishedH2;
            finishedH2 += finishedH1;
        }
    }

    /** MurmurHash3 64-bit finalization mix (avalanches all bits). */
    private long fmix64(long k) {
        k ^= k >>> 33;
        k *= 0xff51afd7ed558ccdL;
        k ^= k >>> 33;
        k *= 0xc4ceb9fe1a85ec53L;
        k ^= k >>> 33;
        return k;
    }

    /**
     * Returns the lower 64 bits of the 128 bit hash (you can use just this value as a 64 bit
     * hash).
     */
    @Override
    public long getValue() {
        checkFinished();
        return finishedH1;
    }

    /** Returns the higher 64 bits of the 128 bit hash. */
    public long getValueHigh() {
        checkFinished();
        return finishedH2;
    }

    /** Positive value. */
    public BigInteger getValueBigInteger() {
        byte[] bytes = getValueBytesBigEndian();
        return new BigInteger(1, bytes);
    }

    /** Padded with leading 0s to ensure length of 32. */
    public String getValueHexString() {
        checkFinished();
        return getPaddedHexString(finishedH2) + getPaddedHexString(finishedH1);
    }

    /** Lowercase hex, zero-padded to 16 characters. */
    private String getPaddedHexString(long value) {
        return String.format("%016x", value);
    }

    /** 16-byte big-endian encoding: high word first, each word most-significant byte first. */
    public byte[] getValueBytesBigEndian() {
        checkFinished();
        byte[] bytes = new byte[16];
        for (int i = 0; i < 8; i++) {
            bytes[i] = (byte) ((finishedH2 >>> (56 - i * 8)) & 0xff);
        }
        for (int i = 0; i < 8; i++) {
            bytes[8 + i] = (byte) ((finishedH1 >>> (56 - i * 8)) & 0xff);
        }
        return bytes;
    }

    /** 16-byte little-endian encoding: low word first, each word least-significant byte first. */
    public byte[] getValueBytesLittleEndian() {
        checkFinished();
        byte[] bytes = new byte[16];
        for (int i = 0; i < 8; i++) {
            bytes[i] = (byte) ((finishedH1 >>> (i * 8)) & 0xff);
        }
        for (int i = 0; i < 8; i++) {
            bytes[8 + i] = (byte) ((finishedH2 >>> (i * 8)) & 0xff);
        }
        return bytes;
    }

    /** Resets to the freshly-seeded state so the instance can hash new input. */
    @Override
    public void reset() {
        h1 = h2 = seed;
        length = 0;
        partialPos = 0;
        finished = false;
        // Required: update(int) ORs bytes into these buffers and relies on them starting at zero.
        partialK1 = partialK2 = 0;
        finishedH1 = finishedH2 = 0;
    }

    /** Reads 8 bytes at {@code index} as a little-endian long. */
    private long getLongLE(byte[] bytes, int index) {
        return (bytes[index] & 0xff) | ((bytes[index + 1] & 0xff) << 8) | ((bytes[index + 2] & 0xff) << 16)
                | ((bytes[index + 3] & 0xffL) << 24) | ((bytes[index + 4] & 0xffL) << 32)
                | ((bytes[index + 5] & 0xffL) << 40) | ((bytes[index + 6] & 0xffL) << 48)
                | (((long) bytes[index + 7]) << 56);
    }
}
package de.motivational.stairs.examples;

/**
 * Created by Florian on 12.11.2016.
 */
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import javax.swing.*;

/**
 * A self-contained two-player Pong game drawn on a Swing {@link JPanel}.
 *
 * <p>Player one (left paddle) is moved with the UP/DOWN arrow keys, player
 * two (right paddle) with W/S. First player to reach 3 points wins. A swing
 * {@link Timer} drives the simulation by invoking {@link #step()} on every
 * tick; all state lives in plain fields mutated on the EDT.
 */
public class PongPanelGame extends JPanel implements ActionListener, KeyListener {

    public static void main(String[] args) {
        JFrame jFrame = new JFrame();
        jFrame.add(new PongPanelGame());
        // FIX: the frame previously had no explicit size (it opened as a
        // zero-sized window) and no close operation (the JVM kept running
        // after the window was closed). 500x500 matches the hard-coded
        // field coordinates below (ball reset at 250, right paddle at 465).
        jFrame.setSize(500, 500);
        jFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        jFrame.setVisible(true);
    }

    // --- screen/game state flags ---------------------------------------
    private boolean showTitleScreen = true;
    private boolean playing = false;
    private boolean gameOver = false;

    // key state, sampled once per timer tick in step()
    private boolean upPressed = false;
    private boolean downPressed = false;
    private boolean wPressed = false;
    private boolean sPressed = false;

    // ball position (top-left of bounding box), size and velocity
    private int ballX = 250;
    private int ballY = 250;
    private int diameter = 20;
    private int ballDeltaX = -1;
    private int ballDeltaY = 3;

    // left paddle (player one, arrow keys)
    private int playerOneX = 25;
    private int playerOneY = 250;
    private int playerOneWidth = 10;
    private int playerOneHeight = 50;

    // right paddle (player two, W/S keys)
    private int playerTwoX = 465;
    private int playerTwoY = 250;
    private int playerTwoWidth = 10;
    private int playerTwoHeight = 50;

    private int paddleSpeed = 5;

    private int playerOneScore = 0;
    private int playerTwoScore = 0;

    /** Builds the panel, wires up key handling and starts the game clock. */
    public PongPanelGame() {
        setBackground(Color.BLACK);

        // listen to key presses
        setFocusable(true);
        addKeyListener(this);

        // fire step() every 10 ms (~100 updates per second)
        // NOTE: the original comment claimed 60 fps, but 1000/100 is a
        // 10 ms period; the period is kept unchanged here.
        Timer timer = new Timer(1000 / 100, this);
        timer.start();
    }

    /** Timer callback: advance the simulation by one tick. */
    @Override
    public void actionPerformed(ActionEvent e) {
        step();
    }

    /**
     * Advances the game by one tick: moves the paddles according to the
     * currently held keys, moves the ball, handles wall/paddle bounces and
     * scoring, then requests a repaint. Does nothing but repaint while not
     * in the "playing" state.
     */
    public void step() {
        if (playing) {
            // move player 1 (left paddle), clamped to the panel
            if (upPressed && playerOneY - paddleSpeed > 0) {
                playerOneY -= paddleSpeed;
            }
            if (downPressed && playerOneY + paddleSpeed + playerOneHeight < getHeight()) {
                playerOneY += paddleSpeed;
            }

            // move player 2 (right paddle), clamped to the panel
            if (wPressed && playerTwoY - paddleSpeed > 0) {
                playerTwoY -= paddleSpeed;
            }
            if (sPressed && playerTwoY + paddleSpeed + playerTwoHeight < getHeight()) {
                playerTwoY += paddleSpeed;
            }

            // where will the ball be after it moves?
            int nextBallLeft = ballX + ballDeltaX;
            int nextBallRight = ballX + diameter + ballDeltaX;
            int nextBallTop = ballY + ballDeltaY;
            int nextBallBottom = ballY + diameter + ballDeltaY;

            int playerOneRight = playerOneX + playerOneWidth;
            int playerOneTop = playerOneY;
            int playerOneBottom = playerOneY + playerOneHeight;
            int playerTwoLeft = playerTwoX;
            int playerTwoTop = playerTwoY;
            int playerTwoBottom = playerTwoY + playerTwoHeight;

            // ball bounces off top and bottom of screen
            if (nextBallTop < 0 || nextBallBottom > getHeight()) {
                ballDeltaY *= -1;
            }

            // will the ball go off the left side?
            if (nextBallLeft < playerOneRight) {
                // is it going to miss the paddle?
                if (nextBallTop > playerOneBottom || nextBallBottom < playerOneTop) {
                    playerTwoScore++;
                    if (playerTwoScore == 3) {
                        playing = false;
                        gameOver = true;
                    }
                    // re-centre the ball after a point
                    ballX = 250;
                    ballY = 250;
                } else {
                    ballDeltaX *= -1;
                }
            }

            // will the ball go off the right side?
            if (nextBallRight > playerTwoLeft) {
                // is it going to miss the paddle?
                if (nextBallTop > playerTwoBottom || nextBallBottom < playerTwoTop) {
                    playerOneScore++;
                    if (playerOneScore == 3) {
                        playing = false;
                        gameOver = true;
                    }
                    ballX = 250;
                    ballY = 250;
                } else {
                    ballDeltaX *= -1;
                }
            }

            // move the ball
            ballX += ballDeltaX;
            ballY += ballDeltaY;
        }

        // stuff has moved, tell this JPanel to repaint itself
        repaint();
    }

    /** Paints whichever screen is active: title, live game, or game over. */
    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g);
        g.setColor(Color.WHITE);

        if (showTitleScreen) {
            // FIX: the 36pt setFont call was duplicated here; one is enough.
            g.setFont(new Font(Font.DIALOG, Font.BOLD, 36));
            g.drawString("Pong", 165, 100);
            g.setFont(new Font(Font.DIALOG, Font.BOLD, 18));
            g.drawString("Press 'P' to play.", 175, 400);
        } else if (playing) {
            int playerOneRight = playerOneX + playerOneWidth;
            int playerTwoLeft = playerTwoX;

            // draw dashed line down center
            for (int lineY = 0; lineY < getHeight(); lineY += 50) {
                g.drawLine(250, lineY, 250, lineY + 25);
            }

            // draw "goal lines" on each side
            g.drawLine(playerOneRight, 0, playerOneRight, getHeight());
            g.drawLine(playerTwoLeft, 0, playerTwoLeft, getHeight());

            // draw the scores
            g.setFont(new Font(Font.DIALOG, Font.BOLD, 36));
            g.drawString(String.valueOf(playerOneScore), 100, 100);
            g.drawString(String.valueOf(playerTwoScore), 400, 100);

            // draw the ball and the paddles
            g.fillOval(ballX, ballY, diameter, diameter);
            g.fillRect(playerOneX, playerOneY, playerOneWidth, playerOneHeight);
            g.fillRect(playerTwoX, playerTwoY, playerTwoWidth, playerTwoHeight);
        } else if (gameOver) {
            g.setFont(new Font(Font.DIALOG, Font.BOLD, 36));
            g.drawString(String.valueOf(playerOneScore), 100, 100);
            g.drawString(String.valueOf(playerTwoScore), 400, 100);

            if (playerOneScore > playerTwoScore) {
                g.drawString("Player 1 Wins!", 165, 200);
            } else {
                g.drawString("Player 2 Wins!", 165, 200);
            }

            g.setFont(new Font(Font.DIALOG, Font.BOLD, 18));
            g.drawString("Press space to restart.", 150, 400);
        }
    }

    @Override
    public void keyTyped(KeyEvent e) {}

    /** Handles per-screen key presses: start, paddle control, restart. */
    @Override
    public void keyPressed(KeyEvent e) {
        if (showTitleScreen) {
            if (e.getKeyCode() == KeyEvent.VK_P) {
                showTitleScreen = false;
                playing = true;
            }
        } else if (playing) {
            if (e.getKeyCode() == KeyEvent.VK_UP) {
                upPressed = true;
            } else if (e.getKeyCode() == KeyEvent.VK_DOWN) {
                downPressed = true;
            } else if (e.getKeyCode() == KeyEvent.VK_W) {
                wPressed = true;
            } else if (e.getKeyCode() == KeyEvent.VK_S) {
                sPressed = true;
            }
        } else if (gameOver) {
            if (e.getKeyCode() == KeyEvent.VK_SPACE) {
                // reset everything and return to the title screen
                gameOver = false;
                showTitleScreen = true;
                playerOneY = 250;
                playerTwoY = 250;
                ballX = 250;
                ballY = 250;
                playerOneScore = 0;
                playerTwoScore = 0;
            }
        }
    }

    /** Clears the held-key flags so paddles stop when keys are released. */
    @Override
    public void keyReleased(KeyEvent e) {
        if (playing) {
            if (e.getKeyCode() == KeyEvent.VK_UP) {
                upPressed = false;
            } else if (e.getKeyCode() == KeyEvent.VK_DOWN) {
                downPressed = false;
            } else if (e.getKeyCode() == KeyEvent.VK_W) {
                wPressed = false;
            } else if (e.getKeyCode() == KeyEvent.VK_S) {
                sPressed = false;
            }
        }
    }
}
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.MergePolicy.MergeTrigger;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

/**
 * Exercises how buffered deletes are carried per segment: deletes buffered
 * before a flush stay in the buffered-updates stream, opening an NRT reader
 * applies them, and a forced merge (via the test-only RangeMergePolicy below)
 * applies deletes to the merged segments.
 */
public class TestPerSegmentDeletes extends LuceneTestCase {

  /**
   * Builds three 5-doc segments, buffers deletes, and verifies when the
   * buffered-updates stream drains (on getReader) and that a merge of
   * segments 0-1 applies the pending delete for id:2.
   */
  public void testDeletes1() throws Exception {
    //IndexWriter.debug2 = System.out;
    Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()),
        new RAMDirectory());
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random()));
    // serial scheduler + huge buffers so segments form only on explicit commit/flush
    iwc.setMergeScheduler(new SerialMergeScheduler());
    iwc.setMaxBufferedDocs(5000);
    iwc.setRAMBufferSizeMB(100);
    RangeMergePolicy fsmp = new RangeMergePolicy(false);
    iwc.setMergePolicy(fsmp);
    IndexWriter writer = new IndexWriter(dir, iwc);

    // segment 0: docs 0..4 tagged "1"
    for (int x = 0; x < 5; x++) {
      writer.addDocument(DocHelper.createDocument(x, "1", 2));
      //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    //System.out.println("commit1");
    writer.commit();
    assertEquals(1, writer.segmentInfos.size());

    // segment 1: docs 5..9 tagged "2"
    for (int x = 5; x < 10; x++) {
      writer.addDocument(DocHelper.createDocument(x, "2", 2));
      //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    //System.out.println("commit2");
    writer.commit();
    assertEquals(2, writer.segmentInfos.size());

    // docs 10..14 tagged "3" stay in the RAM buffer for now
    for (int x = 10; x < 15; x++) {
      writer.addDocument(DocHelper.createDocument(x, "3", 2));
      //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }

    writer.deleteDocuments(new Term("id", "1"));

    writer.deleteDocuments(new Term("id", "11"));

    // flushing without applying deletes means
    // there will still be deletes in the segment infos
    writer.flush(false, false);
    assertTrue(writer.bufferedUpdatesStream.any());

    // get reader flushes pending deletes
    // so there should not be anymore
    IndexReader r1 = writer.getReader();
    assertFalse(writer.bufferedUpdatesStream.any());
    r1.close();

    // delete id:2 from the first segment
    // merge segments 0 and 1
    // which should apply the delete id:2
    writer.deleteDocuments(new Term("id", "2"));
    writer.flush(false, false);
    fsmp = (RangeMergePolicy) writer.getConfig().getMergePolicy();
    fsmp.doMerge = true;
    fsmp.start = 0;
    fsmp.length = 2;
    writer.maybeMerge();

    assertEquals(2, writer.segmentInfos.size());

    // id:2 shouldn't exist anymore because
    // it's been applied in the merge and now it's gone
    IndexReader r2 = writer.getReader();
    int[] id2docs = toDocsArray(new Term("id", "2"), null, r2);
    assertTrue(id2docs == null);
    r2.close();

    /**
    // added docs are in the ram buffer
    for (int x = 15; x < 20; x++) {
      writer.addDocument(TestIndexWriterReader.createDocument(x, "4", 2));
      System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    assertTrue(writer.numRamDocs() > 0);
    // delete from the ram buffer
    writer.deleteDocuments(new Term("id", Integer.toString(13)));

    Term id3 = new Term("id", Integer.toString(3));

    // delete from the 1st segment
    writer.deleteDocuments(id3);

    assertTrue(writer.numRamDocs() > 0);

    //System.out
    //    .println("segdels1:" + writer.docWriter.deletesToString());

    //assertTrue(writer.docWriter.segmentDeletes.size() > 0);

    // we cause a merge to happen
    fsmp.doMerge = true;
    fsmp.start = 0;
    fsmp.length = 2;
    System.out.println("maybeMerge "+writer.segmentInfos);

    SegmentInfo info0 = writer.segmentInfos.info(0);
    SegmentInfo info1 = writer.segmentInfos.info(1);

    writer.maybeMerge();
    System.out.println("maybeMerge after "+writer.segmentInfos);
    // there should be docs in RAM
    assertTrue(writer.numRamDocs() > 0);

    // assert we've merged the 1 and 2 segments
    // and still have a segment leftover == 2
    assertEquals(2, writer.segmentInfos.size());
    assertFalse(segThere(info0, writer.segmentInfos));
    assertFalse(segThere(info1, writer.segmentInfos));

    //System.out.println("segdels2:" + writer.docWriter.deletesToString());

    //assertTrue(writer.docWriter.segmentDeletes.size() > 0);

    IndexReader r = writer.getReader();
    IndexReader r1 = r.getSequentialSubReaders()[0];
    printDelDocs(r1.getLiveDocs());
    int[] docs = toDocsArray(id3, null, r);
    System.out.println("id3 docs:"+Arrays.toString(docs));
    // there shouldn't be any docs for id:3
    assertTrue(docs == null);
    r.close();

    part2(writer, fsmp);
    **/
    // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
    //System.out.println("close");
    writer.close();
    dir.close();
  }

  /**
  static boolean hasPendingDeletes(SegmentInfos infos) {
    for (SegmentInfo info : infos) {
      if (info.deletes.any()) {
        return true;
      }
    }
    return false;
  }
  **/

  // Second phase kept for the commented-out scenario above: builds two more
  // segments, deletes id:8, then forces a merge of segments 1-2.
  void part2(IndexWriter writer, RangeMergePolicy fsmp) throws Exception {
    for (int x = 20; x < 25; x++) {
      writer.addDocument(DocHelper.createDocument(x, "5", 2));
      //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    writer.flush(false, false);
    for (int x = 25; x < 30; x++) {
      writer.addDocument(DocHelper.createDocument(x, "5", 2));
      //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    writer.flush(false, false);

    //System.out.println("infos3:"+writer.segmentInfos);

    Term delterm = new Term("id", "8");
    writer.deleteDocuments(delterm);
    //System.out.println("segdels3:" + writer.docWriter.deletesToString());

    fsmp.doMerge = true;
    fsmp.start = 1;
    fsmp.length = 2;
    writer.maybeMerge();

    // deletes for info1, the newly created segment from the
    // merge should have no deletes because they were applied in
    // the merge
    //SegmentInfo info1 = writer.segmentInfos.info(1);
    //assertFalse(exists(info1, writer.docWriter.segmentDeletes));

    //System.out.println("infos4:"+writer.segmentInfos);
    //System.out.println("segdels4:" + writer.docWriter.deletesToString());
  }

  // true if a segment with the same name as info is still present in infos
  boolean segThere(SegmentCommitInfo info, SegmentInfos infos) {
    for (SegmentCommitInfo si : infos) {
      if (si.info.name.equals(info.info.name)) return true;
    }
    return false;
  }

  // debug helper: dumps the live/deleted flag of every doc in the bitset
  public static void printDelDocs(Bits bits) {
    if (bits == null) return;
    for (int x = 0; x < bits.length(); x++) {
      System.out.println(x + ":" + bits.get(x));
    }
  }

  /**
   * Returns the doc ids matching term (honouring the liveness bits), or
   * null when the term does not exist in the reader.
   */
  public int[] toDocsArray(Term term, Bits bits, IndexReader reader)
      throws IOException {
    Fields fields = MultiFields.getFields(reader);
    Terms cterms = fields.terms(term.field);
    TermsEnum ctermsEnum = cterms.iterator(null);
    if (ctermsEnum.seekExact(new BytesRef(term.text()))) {
      DocsEnum docsEnum = _TestUtil.docs(random(), ctermsEnum, bits, null, DocsEnum.FLAG_NONE);
      return toArray(docsEnum);
    }
    return null;
  }

  // drains a DocsEnum into a plain int[] of doc ids
  public static int[] toArray(DocsEnum docsEnum) throws IOException {
    List<Integer> docs = new ArrayList<Integer>();
    while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
      int docID = docsEnum.docID();
      docs.add(docID);
    }
    return ArrayUtil.toIntArray(docs);
  }

  /**
   * Test-only merge policy: merges nothing until doMerge is set, then
   * returns exactly one merge covering segments [start, start+length) and
   * re-arms itself to do nothing.
   */
  public class RangeMergePolicy extends MergePolicy {
    boolean doMerge = false;      // one-shot trigger, cleared after use
    int start;                    // index of the first segment to merge
    int length;                   // number of consecutive segments to merge

    private final boolean useCompoundFile;

    private RangeMergePolicy(boolean useCompoundFile) {
      this.useCompoundFile = useCompoundFile;
    }

    @Override
    public void close() {}

    @Override
    public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos)
        throws IOException {
      MergeSpecification ms = new MergeSpecification();
      if (doMerge) {
        OneMerge om = new OneMerge(segmentInfos.asList().subList(start, start + length));
        ms.add(om);
        doMerge = false;
        return ms;
      }
      return null;
    }

    @Override
    public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
        int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge)
        throws IOException {
      return null;
    }

    @Override
    public MergeSpecification findForcedDeletesMerges(
        SegmentInfos segmentInfos) throws IOException {
      return null;
    }

    @Override
    public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) {
      return useCompoundFile;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.apache.phoenix.util.TestUtil.analyzeTable;
import static org.apache.phoenix.util.TestUtil.getAllSplits;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;

import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * End-to-end tests for queries over tables with multiple column families:
 * column-family disambiguation in projections and filters, per-CF guidepost
 * (stats) behavior, CF wildcard projections, and a couple of regression
 * tests (PHOENIX-3890, PHOENIX-4658).
 */
@Category(ParallelStatsEnabledTest.class)
public class MultiCfQueryExecIT extends ParallelStatsEnabledIT {
    // fresh, unique table name per test method (set in generateTableNames)
    private String fullTableName;

    /** Generates a unique schema-qualified table name before each test. */
    @Before
    public void generateTableNames() throws SQLException {
        String schemaName = TestUtil.DEFAULT_SCHEMA_NAME;
        String tableName = "T_" + generateUniqueName();
        fullTableName = SchemaUtil.getTableName(schemaName, tableName);
    }

    // Creates the shared fixture table: one PK plus columns spread over
    // column families a..g (note F and G both have a RESPONSE_TIME column,
    // which is what the disambiguation tests rely on).
    private void createTable(Connection conn) throws SQLException {
        conn.createStatement().execute(
            "create table " + fullTableName + " (id char(15) not null primary key,\n"
                + " a.unique_user_count integer,\n" + " b.unique_org_count integer,\n"
                + " c.db_cpu_utilization decimal(31,10),\n" + " d.transaction_count bigint,\n"
                + " e.cpu_utilization decimal(31,10),\n" + " f.response_time bigint,\n"
                + " g.response_time bigint)");
    }

    // Upserts the two fixture rows used by most tests (E column family is
    // deliberately left unpopulated).
    private void initTableValues(Connection conn) throws Exception {
        // Insert all rows at ts
        PreparedStatement stmt = conn.prepareStatement(
            "upsert into " + fullTableName + "(" + "    ID, " + "    TRANSACTION_COUNT, "
                + "    CPU_UTILIZATION, " + "    DB_CPU_UTILIZATION," + "    UNIQUE_USER_COUNT,"
                + "    F.RESPONSE_TIME," + "    G.RESPONSE_TIME)"
                + "VALUES (?, ?, ?, ?, ?, ?, ?)");
        stmt.setString(1, "000000000000001");
        stmt.setInt(2, 100);
        stmt.setBigDecimal(3, BigDecimal.valueOf(0.5));
        stmt.setBigDecimal(4, BigDecimal.valueOf(0.2));
        stmt.setInt(5, 1000);
        stmt.setLong(6, 11111);
        stmt.setLong(7, 11112);
        stmt.execute();
        stmt.setString(1, "000000000000002");
        stmt.setInt(2, 200);
        stmt.setBigDecimal(3, BigDecimal.valueOf(2.5));
        stmt.setBigDecimal(4, BigDecimal.valueOf(2.2));
        stmt.setInt(5, 2000);
        stmt.setLong(6, 2222);
        stmt.setLong(7, 22222);
        stmt.execute();
        conn.commit();
    }

    /** count(1) over the two fixture rows. */
    @Test
    public void testConstantCount() throws Exception {
        String query = "SELECT count(1) from " + fullTableName;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2, rs.getLong(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** F./G. prefixes disambiguate RESPONSE_TIME in the projection; filter on PK. */
    @Test
    public void testCFToDisambiguateInSelectOnly1() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
                + " where ID = '000000000000002'";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Same as above but filtering on an unambiguous non-PK column. */
    @Test
    public void testCFToDisambiguateInSelectOnly2() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
                + " where TRANSACTION_COUNT = 200";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Guideposts for sparsely-populated column families yield fewer splits. */
    @Test
    public void testGuidePostsForMultiCFs() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
                + " where F.RESPONSE_TIME = 2222";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
            // Use E column family. Since the column family with the empty key value (the first one, A)
            // is always added to the scan, we never really use other guideposts (but this may change).
            List<KeyRange> splits =
                    getAllSplits(conn, fullTableName, "e.cpu_utilization IS NOT NULL", "COUNT(*)");
            // Since the E column family is not populated, it won't have as many splits
            assertEquals(3, splits.size());
            // Same as above for G column family.
            splits = getAllSplits(conn, fullTableName, "g.response_time IS NOT NULL", "COUNT(*)");
            assertEquals(3, splits.size());
        } finally {
            conn.close();
        }
    }

    /**
     * With CF1..CF6 populated very unevenly (A on every row, F on every 10th),
     * split counts should track the density of the filtered column family.
     */
    @Test
    public void testGuidePostsForMultiCFsOverUnevenDistrib() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);

        conn.createStatement().execute(
            "CREATE TABLE " + fullTableName + " (K1 CHAR(1) NOT NULL, " + "K2 VARCHAR NOT NULL, "
                + "CF1.A INTEGER, " + "CF2.B INTEGER, " + "CF3.C INTEGER, " + "CF4.D INTEGER, "
                + "CF5.E INTEGER, " + "CF6.F INTEGER "
                + "CONSTRAINT PK PRIMARY KEY (K1,K2)) SPLIT ON ('B','C','D')");

        for (int i = 0; i < 100; i++) {
            String upsert = "UPSERT INTO " + fullTableName + "(K1,K2,A) VALUES('"
                    + Character.toString((char)('A' + i % 10)) + "','" + (i * 10) + "'," + i + ")";
            conn.createStatement().execute(upsert);
            if (i % 10 == 0) {
                conn.createStatement().execute(
                    "UPSERT INTO " + fullTableName + "(K1,K2,F) VALUES('"
                            + Character.toString((char)('A' + i % 10)) + "','" + (i * 10) + "',"
                            + (i * 10) + ")");
            }
        }
        conn.commit();
        try {
            analyzeTable(conn, fullTableName);
            PreparedStatement statement =
                    conn.prepareStatement("select count(*) from " + fullTableName + " where f < 400");
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(4, rs.getLong(1));
            assertFalse(rs.next());
            List<KeyRange> splits = getAllSplits(conn, fullTableName, "f < 400", "COUNT(*)");
            // Uses less populated column f
            assertEquals(14, splits.size());
            // Uses more populated column a
            splits = getAllSplits(conn, fullTableName, "a < 80", "COUNT(*)");
            assertEquals(104, splits.size());
        } finally {
            conn.close();
        }
    }

    /** Guideposts are retrieved for the right CF even with partially-populated rows. */
    @Test
    public void testGuidePostsRetrievedForMultiCF() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;

        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        conn.createStatement().execute(
            "CREATE TABLE " + fullTableName
                + " (  k INTEGER PRIMARY KEY, A.V1 VARCHAR, B.V2 VARCHAR, C.V3 VARCHAR)");

        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?,?)");
        stmt.setInt(1, 1);
        stmt.setString(2, "A");
        stmt.setString(3, "B");
        stmt.setString(4, "C");
        stmt.execute();
        conn.commit();

        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?,?)");
        stmt.setInt(1, 2);
        stmt.setString(2, "D");
        stmt.setString(3, "E");
        stmt.setString(4, "F");
        stmt.execute();
        conn.commit();

        // rows 3 and 4 skip B.V2 on purpose
        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + "(k, A.V1, C.V3) VALUES(?,?,?)");
        stmt.setInt(1, 3);
        stmt.setString(2, "E");
        stmt.setString(3, "X");
        stmt.execute();
        conn.commit();

        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + "(k, A.V1, C.V3) VALUES(?,?,?)");
        stmt.setInt(1, 4);
        stmt.setString(2, "F");
        stmt.setString(3, "F");
        stmt.execute();
        conn.commit();

        analyzeTable(conn, fullTableName);

        rs = conn.createStatement().executeQuery("SELECT B.V2 FROM " + fullTableName + " WHERE B.V2 = 'B'");
        assertTrue(rs.next());
        assertEquals("B", rs.getString(1));

        List<KeyRange> splits = getAllSplits(conn, fullTableName, "C.V3 = 'X'", "A.V1");
        assertEquals(5, splits.size());

        splits = getAllSplits(conn, fullTableName, "B.V2 = 'B'", "B.V2");
        assertEquals(3, splits.size());

        conn.close();
    }

    /** CF-qualified columns on both sides of an expression in the WHERE clause. */
    @Test
    public void testCFToDisambiguate2() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
                + " where G.RESPONSE_TIME-1 = F.RESPONSE_TIME";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(11111, rs.getLong(1));
            assertEquals(11112, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /**
     * Adds an unqualified RESPONSE_TIME column (lands in the default CF) and
     * checks the unqualified reference resolves to it rather than F./G.
     */
    @Test
    public void testDefaultCFToDisambiguate() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        createTable(conn);
        initTableValues(conn);

        String ddl = "ALTER TABLE " + fullTableName + " ADD response_time BIGINT";
        conn.createStatement().execute(ddl);

        String dml = "upsert into " + fullTableName + "(" + "    ID, " + "    RESPONSE_TIME)"
                + "VALUES ('000000000000003', 333)";
        conn.createStatement().execute(dml);
        conn.commit();

        analyzeTable(conn, fullTableName);

        String query = "SELECT ID,RESPONSE_TIME from " + fullTableName + " where RESPONSE_TIME = 333";
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals("000000000000003", rs.getString(1));
            assertEquals(333, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** A row-key-only filter must still bring in the CFs needed by the projection. */
    @Test
    public void testEssentialColumnFamilyForRowKeyFilter() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
                + " where SUBSTR(ID, 15) = '2'";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** SELECT x.* wildcard over a single explicit CF, with and without a local index. */
    @Test
    public void testCFWildcardProjection() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                    + " (pk1 INTEGER NOT NULL PRIMARY KEY, x.v1 VARCHAR, y.v2 INTEGER)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO " + tableName + " VALUES(1, 'test', 2)");
            conn.commit();

            ResultSet rs =
                    conn.createStatement().executeQuery("SELECT x.* FROM "+tableName+" WHERE y.v2 = 2");
            assertTrue(rs.next());
            assertEquals("test", rs.getString(1));
            rs.close();

            // make sure this works with a local index as well (only the data plan needs to be adjusted)
            conn.createStatement().execute("CREATE LOCAL INDEX " + tableName + "_IDX ON " + tableName + "(y.v2)");
            conn.commit();

            rs = conn.createStatement().executeQuery("SELECT x.* FROM "+tableName+" WHERE y.v2 = 2");
            assertTrue(rs.next());
            assertEquals("test", rs.getString(1));
            rs.close();

            rs = conn.createStatement().executeQuery("SELECT y.* FROM "+tableName+" WHERE x.v1 <> 'blah'");
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            rs.close();
        }
    }

    /** SELECT x.*, z.* wildcards over two explicit CFs, with and without a local index. */
    @Test
    public void testMultipleCFWildcardProjection() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                    + " (pk1 INTEGER NOT NULL PRIMARY KEY, x.v1 VARCHAR, y.v2 INTEGER, z.v3 INTEGER)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO " + tableName + " VALUES(1, 'test', 2, 3)");
            conn.commit();

            ResultSet rs =
                    conn.createStatement().executeQuery("SELECT x.*, z.* FROM "+tableName+" WHERE y.v2 = 2");
            assertTrue(rs.next());
            assertEquals("test", rs.getString(1));
            assertEquals(3, rs.getInt(2));
            rs.close();

            // make sure this works with a local index as well (only the data plan needs to be adjusted)
            conn.createStatement().execute("CREATE LOCAL INDEX " + tableName + "_IDX ON " + tableName + "(y.v2)");
            conn.commit();

            rs = conn.createStatement().executeQuery("SELECT x.*, z.* FROM "+tableName+" WHERE y.v2 = 2");
            assertTrue(rs.next());
            assertEquals("test", rs.getString(1));
            assertEquals(3, rs.getInt(2));
            rs.close();

            rs = conn.createStatement().executeQuery("SELECT x.*, y.* FROM "+tableName+" WHERE z.v3 = 3");
            assertTrue(rs.next());
            assertEquals("test", rs.getString(1));
            assertEquals(2, rs.getInt(2));
            rs.close();
        }
    }

    /** Same column name in default and explicit CFs must not be confused (PHOENIX-6423). */
    @Test
    public void testMixedDefaultAndExplicitCFs() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                    + " (pk1 INTEGER NOT NULL PRIMARY KEY, v1 VARCHAR, y.v1 INTEGER)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO " + tableName + " VALUES(1, 'test', 2)");
            conn.commit();

            ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM "+tableName);
            assertTrue(rs.next());
            // Without PHOENIX-6423 this would throw a type mismatch exception, because it would confuse the 3rd
            // column to also be the VARCHAR column.
            assertEquals(2, rs.getInt(3));
            rs.close();

            // make sure this works with a local index as well (only the data plan needs to be adjusted)
            conn.createStatement().execute("CREATE LOCAL INDEX " + tableName + "_IDX ON " + tableName + "(v1)");
            conn.commit();

            rs = conn.createStatement().executeQuery("SELECT * FROM "+tableName);
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(3));
            rs.close();
        }
    }

    /** Regression test for PHOENIX-3890: aggregates over multiple CFs. */
    @Test
    public void testBug3890() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName + " (HOST CHAR(2) NOT NULL,"
                    + " DOMAIN VARCHAR NOT NULL," + " FEATURE VARCHAR NOT NULL," + " DATE DATE NOT NULL,"
                    + " USAGE.CORE BIGINT," + " USAGE.DB BIGINT," + " STATS.ACTIVE_VISITOR INTEGER"
                    + " CONSTRAINT PK PRIMARY KEY (HOST, DOMAIN, FEATURE, DATE))";
            conn.createStatement().execute(ddl);
            String upsert = "UPSERT INTO " + tableName + " VALUES (?, ?, ?, ?, ?, ?, ?)";
            try (PreparedStatement stmt = conn.prepareStatement(upsert)) {
                stmt.setString(1, "H1");
                stmt.setString(2, "Salesforce");
                stmt.setString(3, "F1");
                stmt.setDate(4, new Date(100));
                stmt.setLong(5, 100l);
                stmt.setLong(6, 2000l);
                stmt.setLong(7, 10);
                stmt.executeUpdate();
                stmt.setString(1, "H2");
                stmt.setString(2, "Heroku");
                stmt.setString(3, "F1");
                stmt.setDate(4, new Date(100));
                stmt.setLong(5, 100l);
                stmt.setLong(6, 1000l);
                stmt.setLong(7, 10);
                stmt.executeUpdate();
                conn.commit();
            }
            // aggregate over USAGE CF, grouped by a PK column
            String query = "SELECT DOMAIN, AVG(CORE) Average_CPU_Usage, AVG(DB) Average_DB_Usage FROM "
                    + tableName + " GROUP BY DOMAIN ORDER BY DOMAIN DESC";
            ResultSet rs = conn.createStatement().executeQuery(query);
            rs.next();
            assertEquals("Salesforce", rs.getString(1));
            assertEquals(0, Double.compare(100, rs.getDouble(2)));
            assertEquals(0, Double.compare(2000, rs.getDouble(3)));
            assertTrue(rs.next());
            assertEquals("Heroku", rs.getString(1));
            assertEquals(0, Double.compare(100, rs.getDouble(2)));
            assertEquals(0, Double.compare(1000, rs.getDouble(3)));
            assertFalse(rs.next());

            // aggregate with a LIKE filter on a PK column
            query = "SELECT TRUNC(DATE,'DAY') DAY, SUM(CORE) TOTAL_CPU_Usage, MIN(CORE) MIN_CPU_Usage, MAX(CORE) MAX_CPU_Usage"
                    + " FROM " + tableName + " WHERE DOMAIN LIKE 'Salesforce%'"
                    + " GROUP BY TRUNC(DATE,'DAY')";
            rs = conn.createStatement().executeQuery(query);
            rs.next();
            assertEquals(0, rs.getLong(1));
            assertEquals((Long) 100l, Long.valueOf(rs.getLong(2)));
            assertEquals((Long) 100l, Long.valueOf(rs.getLong(3)));
            assertEquals((Long) 100l, Long.valueOf(rs.getLong(4)));
            assertFalse(rs.next());

            // filter comparing columns from the same CF, project from another CF
            query = "SELECT HOST, SUM(ACTIVE_VISITOR) TOTAL_ACTIVE_VISITORS FROM " + tableName
                    + " WHERE DB > (CORE * 10)" + " GROUP BY HOST";
            rs = conn.createStatement().executeQuery(query);
            rs.next();
            assertEquals("H1", rs.getString(1));
            assertEquals(10, rs.getInt(2));
            assertFalse(rs.next());
        }
    }

    /** Regression test for PHOENIX-4658: reverse scan (and FORWARD_SCAN hint) over multi-CF table. */
    @Test
    public void testBug4658() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl());
                Statement stmt = conn.createStatement()) {
            String tableName = generateUniqueName();
            stmt.execute("CREATE TABLE " + tableName + " (" + "COL1 VARCHAR NOT NULL,"
                    + "COL2 VARCHAR NOT NULL," + "COL3 VARCHAR," + "FAM.COL4 VARCHAR,"
                    + "CONSTRAINT TRADE_EVENT_PK PRIMARY KEY (COL1, COL2))");
            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) values ('111', 'AAA')");
            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) values ('222', 'AAA')");
            conn.commit();

            try (ResultSet rs = stmt.executeQuery(
                    "SELECT * FROM " + tableName + " WHERE COL2 = 'AAA' ORDER BY COL1 DESC")) {
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "222");
                assertEquals(rs.getString("COL2"), "AAA");
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "111");
                assertEquals(rs.getString("COL2"), "AAA");
                assertFalse(rs.next());
            }

            // Tests for FORWARD_SCAN hint
            String query = "SELECT /*+ FORWARD_SCAN */ * FROM " + tableName
                    + " WHERE COL2 = 'AAA' ORDER BY COL1 DESC";
            try (ResultSet rs = stmt.executeQuery("EXPLAIN " + query)) {
                String explainPlan = QueryUtil.getExplainPlan(rs);
                // the hint must suppress the reverse scan in the plan
                assertFalse(explainPlan.contains("REVERSE"));
            }
            try (ResultSet rs = stmt.executeQuery(query)) {
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "222");
                assertEquals(rs.getString("COL2"), "AAA");
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "111");
                assertEquals(rs.getString("COL2"), "AAA");
                assertFalse(rs.next());
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sling.pipes;

import org.apache.commons.io.IOUtils;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.script.Bindings;
import javax.script.Invocable;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import javax.script.SimpleScriptContext;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Execution bindings of a pipe: holds the Nashorn engine context shared by a
 * pipe tree, the per-pipe path bindings and output resources, and helpers to
 * evaluate ${...} expressions against those bindings.
 */
public class PipeBindings {

    private static final Logger log = LoggerFactory.getLogger(PipeBindings.class);

    public static final String NN_ADDITIONALBINDINGS = "additionalBindings";

    public static final String PN_ADDITIONALSCRIPTS = "additionalScripts";

    ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");

    ScriptContext scriptContext = new SimpleScriptContext();

    public static final String PATH_BINDING = "path";

    // pipe name -> path of that pipe's current resource (exposed as 'path.<name>')
    Map<String, String> pathBindings = new HashMap<>();

    // pipe name -> last resource that pipe output
    Map<String, Resource> outputResources = new HashMap<>();

    // matches ${expr} placeholders; group(1) is the embedded expression
    private static final Pattern INJECTED_SCRIPT = Pattern.compile("\\$\\{(([^\\{^\\}]*(\\{[0-9,]+\\})?)*)\\}");

    /**
     * public constructor
     * @param resource pipe configuration resource; optional children
     *                 {@link #NN_ADDITIONALBINDINGS} and {@link #PN_ADDITIONALSCRIPTS}
     *                 seed extra bindings and scripts
     */
    public PipeBindings(Resource resource){
        engine.setContext(scriptContext);
        //add path bindings where path.MyPipe will give MyPipe current resource path
        getBindings().put(PATH_BINDING, pathBindings);
        //additional bindings (global variables to use in child pipes expressions)
        Resource additionalBindings = resource.getChild(NN_ADDITIONALBINDINGS);
        if (additionalBindings != null) {
            ValueMap bindings = additionalBindings.adaptTo(ValueMap.class);
            addBindings(bindings);
        }
        Resource scriptsResource = resource.getChild(PN_ADDITIONALSCRIPTS);
        if (scriptsResource != null) {
            String[] scripts = scriptsResource.adaptTo(String[].class);
            if (scripts != null) {
                for (String script : scripts){
                    addScript(resource.getResourceResolver(), script);
                }
            }
        }
    }

    /**
     * add a script file to the engine
     * @param resolver resolver used to read repository-based scripts
     * @param path either an http(s) URL or an absolute repository path
     */
    public void addScript(ResourceResolver resolver, String path) {
        InputStream is = null;
        try {
            if (path.startsWith("http")) {
                try {
                    URL remoteScript = new URL(path);
                    is = remoteScript.openStream();
                } catch (Exception e) {
                    log.error("unable to retrieve remote script", e);
                }
            } else if (path.startsWith("/")) {
                Resource scriptResource = resolver.getResource(path);
                if (scriptResource != null) {
                    is = scriptResource.adaptTo(InputStream.class);
                }
            }
            if (is != null) {
                try {
                    engine.eval(new InputStreamReader(is), scriptContext);
                } catch (Exception e) {
                    // pass the exception as the last argument so the cause is logged too
                    log.error("unable to execute {}", path, e);
                }
            }
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * adds additional bindings (global variables to use in child pipes expressions)
     * @param bindings name/value pairs to add to the engine scope
     */
    @SuppressWarnings("unchecked")
    public void addBindings(Map bindings) {
        log.info("Adding bindings {}", bindings);
        getBindings().putAll(bindings);
    }

    /**
     * Update current resource of a given pipe, and appropriate binding
     * @param pipe pipe whose output just changed
     * @param resource new current resource of that pipe (may be null)
     */
    public void updateBindings(Pipe pipe, Resource resource) {
        outputResources.put(pipe.getName(), resource);
        if (resource != null) {
            pathBindings.put(pipe.getName(), resource.getPath());
        }
        addBinding(pipe.getName(), pipe.getOutputBinding());
    }

    /**
     * Binds a value under the given name in the engine scope.
     * @param name binding name
     * @param value binding value
     */
    public void addBinding(String name, Object value){
        getBindings().put(name, value);
    }

    /**
     * @param name binding name
     * @return true if a binding of that name exists
     */
    public boolean isBindingDefined(String name){
        return getBindings().containsKey(name);
    }

    /**
     * @return engine-scope bindings backing all expressions
     */
    public Bindings getBindings() {
        return scriptContext.getBindings(ScriptContext.ENGINE_SCOPE);
    }

    /**
     * Doesn't look like nashorn likes template strings :-(
     * Rewrites "a${expr}b" into the equivalent ECMA5 concatenation "'a' + expr + 'b'".
     * @param expr raw expression, possibly containing ${} placeholders
     * @return rewritten expression, or null if expr contains no placeholder
     */
    protected String computeECMA5Expression(String expr){
        Matcher matcher = INJECTED_SCRIPT.matcher(expr);
        if (!matcher.find()) {
            // plain string: nothing to evaluate
            return null;
        }
        // reuse the same matcher (the original built a second, throwaway one);
        // rewind so the loop below sees the first match again
        matcher.reset();
        StringBuilder expression = new StringBuilder();
        int start = 0;
        while (matcher.find()) {
            if (matcher.start() > start) {
                // literal text before this placeholder
                if (expression.length() == 0) {
                    expression.append("'");
                }
                expression.append(expr.substring(start, matcher.start()));
            }
            if (expression.length() > 0) {
                expression.append("' + ");
            }
            expression.append(matcher.group(1));
            start = matcher.end();
            if (start < expr.length()) {
                expression.append(" + '");
            }
        }
        if (start < expr.length()) {
            // trailing literal text after the last placeholder
            expression.append(expr.substring(start)).append("'");
        }
        return expression.toString();
    }

    /**
     * Evaluates the given expression against the current bindings.
     * @param expr expression to evaluate
     * @return evaluation result, or expr itself when it is a plain string
     * @throws ScriptException when the embedded script is invalid
     */
    protected Object evaluate(String expr) throws ScriptException {
        String computed = computeECMA5Expression(expr);
        if (computed != null){
            //computed is null in case expr is a simple string
            return engine.eval(computed, scriptContext);
        }
        return expr;
    }

    /**
     * Expression is a function of variables from execution context, that
     * we implement here as a String
     * @param expr expression to instantiate
     * @return evaluated expression as a String, or expr itself if evaluation failed
     */
    public String instantiateExpression(String expr){
        try {
            return (String)evaluate(expr);
        } catch (ScriptException e) {
            log.error("Unable to evaluate the script", e);
        }
        return expr;
    }

    /**
     * Instantiate object from expression
     * @param expr expression to instantiate
     * @return evaluated object; JS dates are converted to {@link Calendar};
     *         expr itself when evaluation fails
     */
    public Object instantiateObject(String expr){
        try {
            Object result = evaluate(expr);
            if (result != null && ! result.getClass().getName().startsWith("java.lang.")) {
                //special case of the date in which case jdk.nashorn.api.scripting.ScriptObjectMirror will
                //be returned
                JsDate jsDate = ((Invocable) engine).getInterface(result, JsDate.class);
                if (jsDate != null ) {
                    // JS getTime() is UTC millis; getTimezoneOffset() is minutes, hence * 60 * 1000
                    Date date = new Date(jsDate.getTime() + jsDate.getTimezoneOffset() * 60 * 1000);
                    Calendar cal = Calendar.getInstance();
                    cal.setTime(date);
                    return cal;
                }
            }
            return result;
        } catch (ScriptException e) {
            log.error("Unable to evaluate the script for expr {} ", expr, e);
        }
        return expr;
    }

    /**
     * @param name name of the pipe
     * @return output resource of the given pipe, or null if it has not executed yet
     */
    public Resource getExecutedResource(String name) {
        return outputResources.get(name);
    }

    /**
     * interface mapping a javascript date
     */
    public interface JsDate {

        long getTime();

        int getTimezoneOffset();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.olingo2;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.olingo2.internal.Olingo2ApiCollection;
import org.apache.camel.component.olingo2.internal.Olingo2ApiName;
import org.apache.camel.component.olingo2.internal.Olingo2Constants;
import org.apache.camel.component.olingo2.internal.Olingo2PropertiesHelper;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.util.component.AbstractApiEndpoint;
import org.apache.camel.util.component.ApiMethod;
import org.apache.camel.util.component.ApiMethodPropertiesHelper;

/**
 * Communicates with OData 2.0 services using Apache Olingo.
 */
@UriEndpoint(firstVersion = "2.14.0", scheme = "olingo2", title = "Olingo2", syntax = "olingo2:apiName/methodName",
    consumerClass = Olingo2Consumer.class, label = "cloud")
public class Olingo2Endpoint extends AbstractApiEndpoint<Olingo2ApiName, Olingo2Configuration> {

    protected static final String RESOURCE_PATH_PROPERTY = "resourcePath";
    protected static final String RESPONSE_HANDLER_PROPERTY = "responseHandler";

    private static final String KEY_PREDICATE_PROPERTY = "keyPredicate";
    private static final String QUERY_PARAMS_PROPERTY = "queryParams";
    private static final String ENDPOINT_HTTP_HEADERS_PROPERTY = "endpointHttpHeaders";
    private static final String READ_METHOD = "read";
    private static final String EDM_PROPERTY = "edm";
    private static final String DATA_PROPERTY = "data";
    private static final String DELETE_METHOD = "delete";

    // unparsed variants
    private static final String UREAD_METHOD = "uread";

    // names of options the endpoint consumes itself; everything else becomes a query param
    private final Set<String> endpointPropertyNames;

    @UriParam
    private Olingo2Configuration configuration;

    private Olingo2AppWrapper apiProxy;

    public Olingo2Endpoint(String uri, Olingo2Component component,
                           Olingo2ApiName apiName, String methodName, Olingo2Configuration endpointConfiguration) {
        super(uri, component, apiName, methodName,
              Olingo2ApiCollection.getCollection().getHelper(apiName), endpointConfiguration);
        this.configuration = endpointConfiguration;

        // get all endpoint property names
        endpointPropertyNames = new HashSet<String>(getPropertiesHelper().getValidEndpointProperties(configuration));
        // avoid adding edm as queryParam
        endpointPropertyNames.add(EDM_PROPERTY);
        endpointPropertyNames.add(ENDPOINT_HTTP_HEADERS_PROPERTY);
    }

    public Producer createProducer() throws Exception {
        return new Olingo2Producer(this);
    }

    public Consumer createConsumer(Processor processor) throws Exception {
        // make sure inBody is not set for consumers
        if (inBody != null) {
            throw new IllegalArgumentException("Option inBody is not supported for consumer endpoint");
        }
        // only read method is supported
        final boolean readingMethod = READ_METHOD.equals(methodName) || UREAD_METHOD.equals(methodName);
        if (!readingMethod) {
            throw new IllegalArgumentException("Only read method is supported for consumer endpoints");
        }
        final Olingo2Consumer result = new Olingo2Consumer(this, processor);
        // also set consumer.* properties
        configureConsumer(result);
        return result;
    }

    @Override
    protected ApiMethodPropertiesHelper<Olingo2Configuration> getPropertiesHelper() {
        return Olingo2PropertiesHelper.getHelper();
    }

    protected String getThreadProfileName() {
        return Olingo2Constants.THREAD_PROFILE_NAME;
    }

    @Override
    public void configureProperties(Map<String, Object> options) {
        // handle individual query params
        parseQueryParams(options);
        super.configureProperties(options);
    }

    @Override
    protected void afterConfigureProperties() {
        // set default inBody for mutating methods only
        if (inBody == null
            && !READ_METHOD.equals(methodName)
            && !DELETE_METHOD.equals(methodName)
            && !UREAD_METHOD.equals(methodName)) {
            inBody = DATA_PROPERTY;
        }
        createProxy();
    }

    @Override
    public synchronized Object getApiProxy(ApiMethod method, Map<String, Object> args) {
        return apiProxy.getOlingo2App();
    }

    @Override
    public Olingo2Component getComponent() {
        return (Olingo2Component) super.getComponent();
    }

    @Override
    protected void doStart() throws Exception {
        if (apiProxy == null) {
            createProxy();
        }
    }

    @Override
    protected void doStop() throws Exception {
        if (apiProxy != null) {
            // close the apiProxy
            getComponent().closeApiProxy(apiProxy);
            apiProxy = null;
        }
    }

    @Override
    public void interceptPropertyNames(Set<String> propertyNames) {
        // add edm, and responseHandler property names
        // edm is computed on first call to getApiProxy(), and responseHandler is provided by consumer and producer
        if (!DELETE_METHOD.equals(methodName)) {
            propertyNames.add(EDM_PROPERTY);
        }
        propertyNames.add(RESPONSE_HANDLER_PROPERTY);
    }

    @Override
    public void interceptProperties(Map<String, Object> properties) {
        // read Edm if not set yet
        properties.put(EDM_PROPERTY, apiProxy.getEdm());

        // handle keyPredicate
        final String keyPredicate = (String) properties.get(KEY_PREDICATE_PROPERTY);
        if (keyPredicate != null) {
            // make sure a resource path is provided
            final String resourcePath = (String) properties.get(RESOURCE_PATH_PROPERTY);
            if (resourcePath == null) {
                throw new IllegalArgumentException("Resource path must be provided in endpoint URI, or URI parameter '"
                    + RESOURCE_PATH_PROPERTY + "', or exchange header '"
                    + Olingo2Constants.PROPERTY_PREFIX + RESOURCE_PATH_PROPERTY + "'");
            }
            // append keyPredicate to dynamically create resource path
            properties.put(RESOURCE_PATH_PROPERTY, resourcePath + '(' + keyPredicate + ')');
        }

        // handle individual queryParams
        parseQueryParams(properties);
    }

    private void createProxy() {
        apiProxy = getComponent().createApiProxy(getConfiguration());
    }

    private void parseQueryParams(Map<String, Object> options) {
        // extract non-endpoint properties as query params
        final Map<String, String> collected = new HashMap<String, String>();
        final Iterator<Map.Entry<String, Object>> remaining = options.entrySet().iterator();
        while (remaining.hasNext()) {
            final Map.Entry<String, Object> option = remaining.next();
            final String name = option.getKey();
            if (endpointPropertyNames.contains(name)) {
                continue;
            }
            // add to query params
            final Object optionValue = option.getValue();
            if (optionValue == null) {
                throw new IllegalArgumentException("Null value for query parameter " + name);
            }
            collected.put(name, optionValue.toString());
            // remove entry from supplied options
            remaining.remove();
        }

        if (collected.isEmpty()) {
            return;
        }
        @SuppressWarnings("unchecked")
        final Map<String, String> previous = (Map<String, String>) options.get(QUERY_PARAMS_PROPERTY);
        if (previous != null) {
            // overwrite old params in supplied map
            previous.putAll(collected);
        } else {
            // set queryParams property
            options.put(QUERY_PARAMS_PROPERTY, collected);
        }
    }
}
/* * Copyright 2009-2018, Acciente LLC * * Acciente LLC licenses this file to you under the * Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in * writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES * OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing * permissions and limitations under the License. */ package com.acciente.oacc; import org.junit.Test; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.sameInstance; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; public class TestDomainPermission { @Test public void getSysPermissionNames() { assertThat(DomainPermissions.getSysPermissionNames().size(), is(3)); assertThat(DomainPermissions.getSysPermissionNames(), hasItem(DomainPermissions.DELETE)); assertThat(DomainPermissions.getSysPermissionNames(), hasItem(DomainPermissions.CREATE_CHILD_DOMAIN)); assertThat(DomainPermissions.getSysPermissionNames(), hasItem(DomainPermissions.SUPER_USER)); } @Test public void construct_valid() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { DomainPermissions.getInstance(systemPermissionName); } } @Test public void cache_valid() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { final DomainPermission domainPermission = DomainPermissions.getInstance(systemPermissionName); assertThat(DomainPermissions.getInstance(systemPermissionName), sameInstance(domainPermission)); } } @Test public void construct_withGrant_valid() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { 
DomainPermissions.getInstanceWithGrantOption(systemPermissionName); } } @Test public void cache_withGrant_valid() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { final DomainPermission domainPermission = DomainPermissions.getInstanceWithGrantOption(systemPermissionName); assertThat(DomainPermissions.getInstanceWithGrantOption(systemPermissionName), sameInstance(domainPermission)); } } @Test public void construct_whitespaceConsistent() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { assertThat(DomainPermissions.getInstance(" " + systemPermissionName + "\t").getPermissionName(), is(systemPermissionName)); assertThat(DomainPermissions.getInstanceWithGrantOption(" " + systemPermissionName + "\t").getPermissionName(), is(systemPermissionName)); } } @Test public void cache_whitespaceConsistent() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { final DomainPermission domainPermission = DomainPermissions.getInstance(" " + systemPermissionName + "\t"); assertThat(DomainPermissions.getInstance(systemPermissionName), sameInstance(domainPermission)); final DomainPermission grantableDomainPermission = DomainPermissions.getInstanceWithGrantOption(" " + systemPermissionName + "\t"); assertThat(DomainPermissions.getInstanceWithGrantOption(systemPermissionName), sameInstance(grantableDomainPermission)); } } @Test public void construct_caseSensitiveConsistent() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { String mixedCasePermissionName = systemPermissionName.substring(0, systemPermissionName.length()/2).toLowerCase() + systemPermissionName.substring(systemPermissionName.length()/2).toUpperCase(); try { DomainPermissions.getInstance(mixedCasePermissionName); fail("domain permission names are case sensitive - creation of domain permission with case insensitive name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), 
containsString("invalid system permission name")); } // now attempt with grant try { DomainPermissions.getInstanceWithGrantOption(mixedCasePermissionName); fail("domain permission names are case sensitive - creation of domain permission with case insensitive name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("invalid system permission name")); } } } @Test public void construct_nulls_shouldFail() { try { DomainPermissions.getInstance((String) null); fail("creation of domain permission with null name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } // now attempt with grant try { DomainPermissions.getInstanceWithGrantOption(null); fail("creation of domain permission with null name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } } @Test public void construct_asteriskPermissionPrefix_shouldFail() { try { DomainPermissions.getInstance("*invalid"); fail("creation of domain permission with asterisk-prefixed name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("invalid system permission name")); } // now attempt with grant try { DomainPermissions.getInstanceWithGrantOption("*invalid"); fail("creation of domain permission with asterisk-prefixed name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("invalid system permission name")); } } @Test public void construct_blankNames_shouldFail() { try { DomainPermissions.getInstance(""); fail("creation of domain permission with empty name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } try { DomainPermissions.getInstance(" \t"); fail("creation of domain permission with empty name should 
have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } // now attempt with grant try { DomainPermissions.getInstanceWithGrantOption(""); fail("creation of domain permission with empty name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } try { DomainPermissions.getInstanceWithGrantOption(" \t"); fail("creation of domain permission with empty name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("system permission name is required")); } } @Test public void construct_nonSystemDomainPermission_shouldFail() { try { DomainPermissions.getInstance("invalid"); fail("creation of domain permission non-system domain permission name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("invalid system permission name")); } // now attempt with grant try { DomainPermissions.getInstanceWithGrantOption("invalid"); fail("creation of domain permission non-system domain permission name should have failed"); } catch (Exception e) { assertThat(e.getMessage().toLowerCase(), containsString("invalid system permission name")); } } @Test public void toStringTest() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { final DomainPermission domainPermission = DomainPermissions.getInstance(systemPermissionName); assertThat(domainPermission.toString(), is(systemPermissionName)); } } @Test public void toStringTest_withGrant() { for(String systemPermissionName : DomainPermissions.getSysPermissionNames()) { final DomainPermission domainPermission = DomainPermissions.getInstanceWithGrantOption(systemPermissionName); final String stringRepresentation = domainPermission.toString(); assertThat(stringRepresentation, is(systemPermissionName + " /G")); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.utils;

import java.io.Closeable;
import java.util.*;

import com.google.common.collect.AbstractIterator;

/** Merges sorted input iterators which individually contain unique items. */
public abstract class MergeIterator<In,Out> extends AbstractIterator<Out> implements IMergeIterator<In, Out>
{
    // folds all input items that compare equal into one output item
    protected final Reducer<In,Out> reducer;
    // the sorted source iterators being merged
    protected final List<? extends Iterator<In>> iterators;

    protected MergeIterator(List<? extends Iterator<In>> iters, Reducer<In, Out> reducer)
    {
        this.iterators = iters;
        this.reducer = reducer;
    }

    /**
     * Builds a merge iterator over the given sources.
     * A single source skips the heap machinery; when the reducer additionally
     * declares one-source reduction trivial, items are passed straight through.
     */
    public static <In, Out> MergeIterator<In, Out> get(List<? extends Iterator<In>> sources,
                                                      Comparator<? super In> comparator,
                                                      Reducer<In, Out> reducer)
    {
        if (sources.size() == 1)
        {
            return reducer.trivialReduceIsTrivial()
                 ? new TrivialOneToOne<>(sources, reducer)
                 : new OneToOne<>(sources, reducer);
        }
        return new ManyToOne<>(sources, comparator, reducer);
    }

    /** @return the source iterators this merge iterator reads from */
    public Iterable<? extends Iterator<In>> iterators()
    {
        return iterators;
    }

    /**
     * Closes every source that is AutoCloseable, then the reducer.
     * Any close failure is rethrown wrapped in a RuntimeException.
     */
    public void close()
    {
        for (Iterator<In> iterator : this.iterators)
        {
            try
            {
                if (iterator instanceof AutoCloseable)
                    ((AutoCloseable)iterator).close();
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }

        reducer.close();
    }

    /** A MergeIterator that consumes multiple input values per output value. */
    private static final class ManyToOne<In,Out> extends MergeIterator<In,Out>
    {
        // a queue for return: all candidates must be open and have at least one item
        protected final PriorityQueue<Candidate<In>> queue;

        // a stack of the last consumed candidates, so that we can lazily call 'advance()'
        // TODO: if we had our own PriorityQueue implementation we could stash items
        // at the end of its array, so we wouldn't need this storage
        protected final ArrayDeque<Candidate<In>> candidates;

        public ManyToOne(List<? extends Iterator<In>> iters, Comparator<? super In> comp, Reducer<In, Out> reducer)
        {
            super(iters, reducer);
            // Math.max(1, ...) avoids PriorityQueue's rejection of capacity 0
            this.queue = new PriorityQueue<>(Math.max(1, iters.size()));
            for (int i = 0; i < iters.size(); i++)
            {
                Candidate<In> candidate = new Candidate<>(i, iters.get(i), comp);
                if (!candidate.advance())
                    // was empty
                    continue;
                this.queue.add(candidate);
            }
            this.candidates = new ArrayDeque<>(queue.size());
        }

        protected final Out computeNext()
        {
            // re-enqueue candidates consumed last round before picking the next key
            advance();
            return consume();
        }

        /** Consume values by sending them to the reducer while they are equal. */
        protected final Out consume()
        {
            reducer.onKeyChange();
            Candidate<In> candidate = queue.peek();
            if (candidate == null)
                return endOfData();
            do
            {
                // pop the smallest head; remember it so advance() can refill later
                candidate = queue.poll();
                candidates.push(candidate);
                reducer.reduce(candidate.idx, candidate.item);
            }
            // keep consuming while the next head compares equal to the current key
            while (queue.peek() != null && queue.peek().compareTo(candidate) == 0);
            return reducer.getReduced();
        }

        /** Advance and re-enqueue all items we consumed in the last iteration. */
        protected final void advance()
        {
            Candidate<In> candidate;
            while ((candidate = candidates.pollFirst()) != null)
                if (candidate.advance())
                    queue.add(candidate);
        }
    }

    // Holds and is comparable by the head item of an iterator it owns
    protected static final class Candidate<In> implements Comparable<Candidate<In>>
    {
        private final Iterator<? extends In> iter;
        private final Comparator<? super In> comp;
        // index of the owning source, passed through to Reducer.reduce()
        private final int idx;
        // current head item; only valid after a successful advance()
        private In item;

        public Candidate(int idx, Iterator<? extends In> iter, Comparator<? super In> comp)
        {
            this.iter = iter;
            this.comp = comp;
            this.idx = idx;
        }

        /** @return True if our iterator had an item, and it is now available */
        protected boolean advance()
        {
            if (!iter.hasNext())
                return false;
            item = iter.next();
            return true;
        }

        public int compareTo(Candidate<In> that)
        {
            return comp.compare(this.item, that.item);
        }
    }

    /** Accumulator that collects values of type A, and outputs a value of type B. */
    public static abstract class Reducer<In,Out>
    {
        /**
         * @return true if Out is the same as In for the case of a single source iterator
         */
        public boolean trivialReduceIsTrivial()
        {
            return false;
        }

        /**
         * combine this object with the previous ones.
         * intermediate state is up to your implementation.
         */
        public abstract void reduce(int idx, In current);

        /** @return The last object computed by reduce */
        protected abstract Out getReduced();

        /**
         * Called at the beginning of each new key, before any reduce is called.
         * To be overridden by implementing classes.
         */
        protected void onKeyChange() {}

        /**
         * May be overridden by implementations that require cleaning up after use
         */
        public void close() {}
    }

    // single-source variant: no heap, but still runs every item through the reducer
    private static class OneToOne<In, Out> extends MergeIterator<In, Out>
    {
        private final Iterator<In> source;

        public OneToOne(List<? extends Iterator<In>> sources, Reducer<In, Out> reducer)
        {
            super(sources, reducer);
            source = sources.get(0);
        }

        protected Out computeNext()
        {
            if (!source.hasNext())
                return endOfData();
            reducer.onKeyChange();
            reducer.reduce(0, source.next());
            return reducer.getReduced();
        }
    }

    // single-source variant where reduction is the identity: items pass through untouched
    private static class TrivialOneToOne<In, Out> extends MergeIterator<In, Out>
    {
        private final Iterator<In> source;

        public TrivialOneToOne(List<? extends Iterator<In>> sources, Reducer<In, Out> reducer)
        {
            super(sources, reducer);
            source = sources.get(0);
        }

        @SuppressWarnings("unchecked")
        protected Out computeNext()
        {
            if (!source.hasNext())
                return endOfData();
            // cast is safe only because trivialReduceIsTrivial() promised In == Out here
            return (Out) source.next();
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable OpenFlow 1.4 (wire version 5) BSN experimenter stats request
 * carrying a single port number (experimenter 0x5c16c7, subtype 0x8).
 * Fixed wire length is 28 bytes. Generated code — regenerate via LoxiGen
 * rather than editing by hand.
 */
class OFBsnPortCounterStatsRequestVer14 implements OFBsnPortCounterStatsRequest {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnPortCounterStatsRequestVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 28;

        // Defaults used by the parent-less Builder when a property is never set.
        private final static long DEFAULT_XID = 0x0L;
        private final static Set<OFStatsRequestFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsRequestFlags>of();
        private final static OFPort DEFAULT_PORT_NO = OFPort.ANY;

    // OF message fields
    private final long xid;
    private final Set<OFStatsRequestFlags> flags;
    private final OFPort portNo;
//
    // Immutable default instance
    final static OFBsnPortCounterStatsRequestVer14 DEFAULT = new OFBsnPortCounterStatsRequestVer14(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_PORT_NO
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnPortCounterStatsRequestVer14(long xid, Set<OFStatsRequestFlags> flags, OFPort portNo) {
        if(flags == null) {
            throw new NullPointerException("OFBsnPortCounterStatsRequestVer14: property flags cannot be null");
        }
        if(portNo == null) {
            throw new NullPointerException("OFBsnPortCounterStatsRequestVer14: property portNo cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.portNo = portNo;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REQUEST;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsRequestFlags> getFlags() {
        return flags;
    }

    @Override
    public long getExperimenter() {
        // BSN (Big Switch Networks) experimenter id — fixed on the wire.
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        // BSN port-counter stats request subtype — fixed on the wire.
        return 0x8L;
    }

    @Override
    public OFPort getPortNo() {
        return portNo;
    }

    public OFBsnPortCounterStatsRequest.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /**
     * Builder seeded from an existing message: any property not explicitly
     * set falls back to the parent message's value in build().
     */
    static class BuilderWithParent implements OFBsnPortCounterStatsRequest.Builder {
        final OFBsnPortCounterStatsRequestVer14 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        BuilderWithParent(OFBsnPortCounterStatsRequestVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }

        @Override
        public OFBsnPortCounterStatsRequest build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
                if(flags == null)
                    throw new NullPointerException("Property flags must not be null");
                OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo;
                if(portNo == null)
                    throw new NullPointerException("Property portNo must not be null");

                //
                return new OFBsnPortCounterStatsRequestVer14(
                    xid,
                    flags,
                    portNo
                );
        }
    }

    /**
     * Stand-alone builder: unset properties fall back to the class-level
     * DEFAULT_* constants in build().
     */
    static class Builder implements OFBsnPortCounterStatsRequest.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }
//
        @Override
        public OFBsnPortCounterStatsRequest build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");


            return new OFBsnPortCounterStatsRequestVer14(
                    xid,
                    flags,
                    portNo
                );
        }
    }


    final static Reader READER = new Reader();

    /**
     * Deserializer. Validates every fixed-value header field and returns
     * null (after resetting the reader index) when the buffer does not yet
     * contain the full 28-byte message.
     */
    static class Reader implements OFMessageReader<OFBsnPortCounterStatsRequest> {
        @Override
        public OFBsnPortCounterStatsRequest readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 18
            byte type = bb.readByte();
            if(type != (byte) 0x12)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REQUEST(18), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 28)
                throw new OFParseError("Wrong length: Expected=28(28), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsRequestFlags> flags = OFStatsRequestFlagsSerializerVer14.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x8L
            int subtype = bb.readInt();
            if(subtype != 0x8)
                throw new OFParseError("Wrong subtype: Expected=0x8L(0x8L), got="+subtype);
            OFPort portNo = OFPort.read4Bytes(bb);

            OFBsnPortCounterStatsRequestVer14 bsnPortCounterStatsRequestVer14 = new OFBsnPortCounterStatsRequestVer14(
                    xid,
                      flags,
                      portNo
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnPortCounterStatsRequestVer14);
            return bsnPortCounterStatsRequestVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnPortCounterStatsRequestVer14Funnel FUNNEL = new OFBsnPortCounterStatsRequestVer14Funnel();

    /**
     * Guava Funnel: feeds the message's wire-relevant content into a
     * PrimitiveSink (e.g. for hashing). Mirrors the Writer's field order;
     * the 4 pad bytes are intentionally skipped.
     */
    static class OFBsnPortCounterStatsRequestVer14Funnel implements Funnel<OFBsnPortCounterStatsRequestVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnPortCounterStatsRequestVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 18
            sink.putByte((byte) 0x12);
            // fixed value property length = 28
            sink.putShort((short) 0x1c);
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsRequestFlagsSerializerVer14.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            sink.putInt(0x8);
            message.portNo.putTo(sink);
        }
    }


    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /**
     * Serializer: emits the fixed 28-byte wire layout. Field order must
     * match Reader.readFrom exactly.
     */
    static class Writer implements OFMessageWriter<OFBsnPortCounterStatsRequestVer14> {
        @Override
        public void write(ByteBuf bb, OFBsnPortCounterStatsRequestVer14 message) {
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 18
            bb.writeByte((byte) 0x12);
            // fixed value property length = 28
            bb.writeShort((short) 0x1c);
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsRequestFlagsSerializerVer14.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            bb.writeInt(0x8);
            message.portNo.write4Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnPortCounterStatsRequestVer14(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("portNo=").append(portNo);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnPortCounterStatsRequestVer14 other = (OFBsnPortCounterStatsRequestVer14) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (portNo == null) {
            if (other.portNo != null)
                return false;
        } else if (!portNo.equals(other.portNo))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the first term omits the usual "result +" (i.e. it is
        // prime * (int)(...), not prime * result + (int)(...)). Since result
        // is 1 here the hashCode contract still holds; matches the generator's
        // output, so left untouched — confirm against the LoxiGen template
        // before changing.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
        return result;
    }

}
/*<license>
Copyright 2004 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.value_III.time;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.ppwcode.util.test.contract.Contract.contractFor;
import static org.ppwcode.value_III.time.Duration.delta;
import static org.ppwcode.value_III.time.Duration.sum;
import static org.ppwcode.value_III.time.Duration.Unit.CENTURY;
import static org.ppwcode.value_III.time.Duration.Unit.DAY;
import static org.ppwcode.value_III.time.Duration.Unit.DECENNIUM;
import static org.ppwcode.value_III.time.Duration.Unit.HOUR;
import static org.ppwcode.value_III.time.Duration.Unit.MILLENNIUM;
import static org.ppwcode.value_III.time.Duration.Unit.MILLISECOND;
import static org.ppwcode.value_III.time.Duration.Unit.MINUTE;
import static org.ppwcode.value_III.time.Duration.Unit.MONTH;
import static org.ppwcode.value_III.time.Duration.Unit.QUARTER;
import static org.ppwcode.value_III.time.Duration.Unit.SECOND;
import static org.ppwcode.value_III.time.Duration.Unit.WEEK;
import static org.ppwcode.value_III.time.Duration.Unit.YEAR;
import static org.ppwcode.vernacular.exception_II.ProgrammingErrorHelpers.unexpectedException;

import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.ppwcode.util.test.contract.NoSuchContractException;
import org.ppwcode.value_III.time.Duration.Unit;
import org.ppwcode.vernacular.value_III.ImmutableValue;
import org.ppwcode.vernacular.value_III._Contract_ImmutableValue;

/**
 * JUnit 4 tests for {@link Duration}. Subjects are the cross product of
 * LONGS x Duration.Unit (restricted to representable combinations), rebuilt
 * before every test by {@link #before()}.
 */
public class DurationTest {

  /** Not a real test: prints the millisecond size and max duration of every Unit. */
  @Test
  public void demo() {
    NumberFormat nf = NumberFormat.getIntegerInstance(new Locale("nl", "BE"));
    System.out.println("Long MAX = " + nf.format(Long.MAX_VALUE));
    System.out.println("MILLISECOND.asMilliseconds() == " + nf.format(MILLISECOND.asMilliseconds()) + ", max = " + nf.format(MILLISECOND.maxDuration()));
    System.out.println("SECOND.asMilliseconds() == " + nf.format(SECOND.asMilliseconds()) + ", max = " + nf.format(SECOND.maxDuration()));
    System.out.println("MINUTE.asMilliseconds() == " + nf.format(MINUTE.asMilliseconds()) + ", max = " + nf.format(MINUTE.maxDuration()));
    System.out.println("HOUR.asMilliseconds() == " + nf.format(HOUR.asMilliseconds()) + ", max = " + nf.format(HOUR.maxDuration()));
    System.out.println("DAY.asMilliseconds() == " + nf.format(DAY.asMilliseconds()) + ", max = " + nf.format(DAY.maxDuration()));
    System.out.println("WEEK.asMilliseconds() == " + nf.format(WEEK.asMilliseconds()) + ", max = " + nf.format(WEEK.maxDuration()));
    System.out.println("MONTH.asMilliseconds() == " + nf.format(MONTH.asMilliseconds()) + ", max = " + nf.format(MONTH.maxDuration()));
    System.out.println("QUARTER.asMilliseconds() == " + nf.format(QUARTER.asMilliseconds()) + ", max = " + nf.format(QUARTER.maxDuration()));
    System.out.println("YEAR.asMilliseconds() == " + nf.format(YEAR.asMilliseconds()) + ", max = " + nf.format(YEAR.maxDuration()));
    System.out.println("DECENNIUM.asMilliseconds() == " + nf.format(DECENNIUM.asMilliseconds()) + ", max = " + nf.format(DECENNIUM.maxDuration()));
    System.out.println("CENTURY.asMilliseconds() == " + nf.format(CENTURY.asMilliseconds()) + ", max = " + nf.format(CENTURY.maxDuration()));
    System.out.println("MILLENNIUM.asMilliseconds() == " + nf.format(MILLENNIUM.asMilliseconds()) + ", max = " + nf.format(MILLENNIUM.maxDuration()));
    System.out.println("Integer MAX = " + nf.format(Integer.MAX_VALUE));
//    System.out.println("Age of the universe = " + new Duration((long)14E9, YEAR)); too big
    Duration maxDuration = new Duration(Long.MAX_VALUE, MILLISECOND);
    assertInvariants(maxDuration);
  }

  /** Asserts that unit u is exactly expectedMs milliseconds, and positive. */
  public void assertInvariants(Unit u, long expectedMs) {
//    NumberFormat nf = NumberFormat.getIntegerInstance(new Locale("nl", "BE"));
//    System.out.println("Asserting " + u);
//    System.out.println("  " + u.toString() + ".asMilliseconds() == " + nf.format(u.asMilliseconds()));
//    System.out.println("  expectedMs == " + nf.format(expectedMs));
    assertEquals(expectedMs, u.asMilliseconds());
    assertTrue(u.asMilliseconds() > 0);
  }

  /**
   * Pins the millisecond value of every Unit. Note the calendar conventions
   * baked in: MONTH = 30 days, YEAR = 365 days, DECENNIUM adds 2 leap days,
   * CENTURY adds 24, MILLENNIUM adds 241.
   */
  @Test
  public void testUnit() {
//    NumberFormat nf = NumberFormat.getIntegerInstance(new Locale("nl", "BE"));
//    System.out.println("Long MAX = " + nf.format(Long.MAX_VALUE));
    assertInvariants(MILLISECOND, 1);
    assertInvariants(SECOND, 1000L);
    assertInvariants(MINUTE, 60L * SECOND.asMilliseconds());
    assertInvariants(HOUR, 60L * MINUTE.asMilliseconds());
    assertInvariants(DAY, 24L * HOUR.asMilliseconds());
    assertInvariants(WEEK, 7L * DAY.asMilliseconds());
    assertInvariants(MONTH, 30L * DAY.asMilliseconds());
    assertInvariants(QUARTER, 3L * MONTH.asMilliseconds());
    assertInvariants(YEAR, 365L * DAY.asMilliseconds());
    assertInvariants(DECENNIUM, 10L * YEAR.asMilliseconds() + 2L * DAY.asMilliseconds());
    assertInvariants(CENTURY, 100L * YEAR.asMilliseconds() + 24L *DAY.asMilliseconds());
    assertInvariants(MILLENNIUM, 1000L * YEAR.asMilliseconds() + 241L * DAY.asMilliseconds());
  }

  // Magnitudes used to generate test subjects, from zero up to Long.MAX_VALUE.
  public final static long[] LONGS = {0L, 1L, 10L, 1000L, 3600000L, MILLENNIUM.asMilliseconds(), Long.MAX_VALUE};

  // Test subjects; rebuilt per test in before(), released in after().
  public List<Duration> $subjects;

  @Before
  public void before() {
    $subjects = new ArrayList<Duration>();
    for (long lS : LONGS) {
      for (Duration.Unit uS : Duration.Unit.values()) {
        // Only combinations that fit in the unit's representable range.
        if (lS <= uS.maxDuration()) {
          Duration subject = new Duration(lS, uS);
          $subjects.add(subject);
        }
      }
    }
  }

  @After
  public void after() {
    $subjects = null;
  }

  /** Type invariant: a Duration expressed in any unit is non-negative. */
  private void assertInvariants(Duration subject) {
//    NumberFormat nf = NumberFormat.getNumberInstance(new Locale("nl", "BE"));
//    System.out.println("  subject = " + subject);
    for (Unit u : Unit.values()) {
//      System.out.println("    as " + u + " = " + nf.format(subject.as(u)));
      assertTrue(subject.as(u) >= 0);
    }
  }

  /**
   * Constructor check for one (l, u) pair; skipped when the pair does not
   * fit the unit's range. A null unit is only exercised with l == 0.
   */
  private void testDurationLongUnit(long l, Unit u) {
    if (u == null || l <= u.maxDuration()) {
      Duration subject = new Duration(l, u);
      long expected = (l == 0 ? 0 : l * u.asMilliseconds());
//      System.out.println(l + " " + u + ": " + subject.as(MILLISECOND) + " " + expected);
      assertInvariants(subject);
      // NOTE(review): the exact == check below makes the delta-based
      // assertEquals that follows redundant — presumably as(MILLISECOND)
      // returns a floating-point type; confirm against Duration's API
      // before tightening.
      assertTrue(subject.as(MILLISECOND) == expected);
      assertEquals(expected, subject.as(MILLISECOND), expected / 10E6);
    }
    // else, no Duration possible
  }

  @Test
  public void testDurationLongUnit() {
    testDurationLongUnit(0, null);
    for (long l : LONGS) {
      for (Duration.Unit u : Duration.Unit.values()) {
        testDurationLongUnit(l, u);
      }
    }
  }

  // Shared ImmutableValue contract, resolved once for the whole class.
  public static _Contract_ImmutableValue CONTRACT;

  static {
    try {
      CONTRACT = (_Contract_ImmutableValue)contractFor(ImmutableValue.class);
    }
    catch (NoSuchContractException exc) {
      unexpectedException(exc);
    }
  }

  /** equals must agree with millisecond equality for Duration operands. */
  public void testEqualsObject(Duration subject, Object other) {
    boolean result = subject.equals(other);
    CONTRACT.assertEqualsObject(subject, other, result);
    if (other != null && other instanceof Duration) {
      boolean expected = (subject.as(MILLISECOND) == ((Duration)other).as(MILLISECOND));
      assertEquals(expected, result);
      CONTRACT.assertInvariants(subject);
    }
  }

  @Test
  public void testAsMillisecond() {
    for (Duration subject : $subjects) {
      long result = subject.asMillisecond();
      // NOTE(review): float cannot represent large longs exactly, so for
      // subjects near Long.MAX_VALUE this comparison is only approximate
      // (hence the relative delta). Presumably as(MILLISECOND) returns
      // float — confirm before changing.
      float expected = subject.as(MILLISECOND);
      assertEquals(expected, result, expected / 1E6);
      CONTRACT.assertInvariants(subject);
    }
  }

  @Test
  public void testEqualsObject() {
    for (Duration subject : $subjects) {
      testEqualsObject(subject, null);
      testEqualsObject(subject, new Object());
      for (Duration other : $subjects) {
        testEqualsObject(subject, other);
      }
    }
  }

  @Test
  public void testHashCode() {
    for (Duration subject : $subjects) {
      subject.hashCode();
      CONTRACT.assertInvariants(subject);
    }
  }

  @Test
  public void testToString() {
    for (Duration subject : $subjects) {
      subject.toString();
      CONTRACT.assertInvariants(subject);
    }
  }

  @Test
  public void testCompareTo() {
    for (Duration subject : $subjects) {
      testCompareTo(subject, null);
      for (Duration other : $subjects) {
        testCompareTo(subject, other);
      }
    }
  }

  /**
   * compareTo must order by millisecond value; null compares as smaller
   * than everything (note: this expects compareTo(null) == +1, not an NPE).
   */
  private void testCompareTo(Duration subject, Duration other) {
    int result = subject.compareTo(other);
    // result == 0 implies equals; the converse is checked via expected below.
    assertTrue(result == 0 ? subject.equals(other) : true);
    int expected = ((other == null || subject.as(MILLISECOND) < other.as(MILLISECOND)) ? -1 :
                     (subject.as(MILLISECOND) == other.as(MILLISECOND) ? 0 : +1));
    assertEquals(expected, result);
    CONTRACT.assertInvariants(subject);
    if (other != null) {
      CONTRACT.assertInvariants(other);
    }
  }

  @Test
  public void testSum() {
    // Empty sum is the zero duration.
    Duration result1 = Duration.sum();
    assertNotNull(result1);
    assertEquals(0, result1.asMillisecond());
    for (Duration subject : $subjects) {
      // Singleton sum is the identity.
      Duration result2 = sum(subject);
      assertNotNull(result2);
      assertEquals(subject, result2);
      for (Duration other : $subjects) {
        // Guard against long overflow of the pairwise sum.
        if (subject.asMillisecond() <= Long.MAX_VALUE - other.asMillisecond()) {
          Duration result3 = sum(subject, other);
          assertNotNull(result3);
          long expected = subject.asMillisecond() + other.asMillisecond();
          assertEquals(expected, result3.asMillisecond());
        }
      }
    }
  }

  @Test
  public void testDelta() {
    for (Duration subject : $subjects) {
      for (Duration other : $subjects) {
        // delta is symmetric; exercise both orders explicitly.
        testDelta(subject, other);
        testDelta(other, subject);
      }
    }
  }

  /** delta must be the absolute millisecond difference. */
  private void testDelta(Duration subject, Duration other) {
    Duration result = delta(subject, other);
    assertNotNull(result);
    long expected = Math.abs(subject.asMillisecond() - other.asMillisecond());
    assertEquals(expected, result.asMillisecond());
  }

  @Test
  public void testTimes() {
    for (Duration subject : $subjects) {
      for (long l : LONGS) {
        // Restrict to int-range, non-negative factors that cannot overflow.
        if ((l < Integer.MAX_VALUE) && (l >= 0) && (l == 0 || subject.asMillisecond() <= Long.MAX_VALUE / l)) {
          Duration result = subject.times((int)l);
          assertNotNull(result);
          long expected = subject.asMillisecond() * l;
          assertEquals(expected, result.asMillisecond());
        }
      }
    }
  }

  @Test
  public void testDiv() {
    for (Duration subject : $subjects) {
      for (long l : LONGS) {
        // Restrict to strictly-positive int-range divisors.
        if ((l < Integer.MAX_VALUE) && (l > 0)) {
          Duration result = subject.div((int)l);
          assertNotNull(result);
          long expected = subject.asMillisecond() / l;
          assertEquals(expected, result.asMillisecond());
        }
      }
    }
  }

}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: BeanPlayer.proto

package org.okraAx.v3.beans.player;

/**
 * Protobuf type {@code org.okraAx.v3.CallbackLoginBean}
 *
 * <p>proto3 message with a single string field {@code openId}. Generated
 * code — change BeanPlayer.proto and re-run protoc instead of editing.
 * Note this generator version discards unknown fields (see
 * {@link #getUnknownFields()} and the no-op unknown-field builder methods).
 */
public  final class CallbackLoginBean extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:org.okraAx.v3.CallbackLoginBean)
    CallbackLoginBeanOrBuilder {
  // Use CallbackLoginBean.newBuilder() to construct.
  private CallbackLoginBean(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CallbackLoginBean() {
    openId_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    // Unknown fields are not retained by this generator version.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Parsing constructor: reads tag/value pairs until end of input.
  private CallbackLoginBean(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            // Unknown field: skip (not preserved).
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1 (openId), wire type 2 (length-delimited).
            java.lang.String s = input.readStringRequireUtf8();

            openId_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.okraAx.v3.beans.player.GpcBnPlayer.internal_static_org_okraAx_v3_CallbackLoginBean_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.okraAx.v3.beans.player.GpcBnPlayer.internal_static_org_okraAx_v3_CallbackLoginBean_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.okraAx.v3.beans.player.CallbackLoginBean.class, org.okraAx.v3.beans.player.CallbackLoginBean.Builder.class);
  }

  public static final int OPENID_FIELD_NUMBER = 1;
  // Lazily converted: holds a String or a ByteString, swapped on access.
  private volatile java.lang.Object openId_;
  /**
   * <code>optional string openId = 1;</code>
   */
  public java.lang.String getOpenId() {
    java.lang.Object ref = openId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = 
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form for subsequent calls.
      openId_ = s;
      return s;
    }
  }
  /**
   * <code>optional string openId = 1;</code>
   */
  public com.google.protobuf.ByteString
      getOpenIdBytes() {
    java.lang.Object ref = openId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b = 
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded form for subsequent calls.
      openId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // proto3: empty string fields are not serialized.
    if (!getOpenIdBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, openId_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getOpenIdBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, openId_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.okraAx.v3.beans.player.CallbackLoginBean)) {
      return super.equals(obj);
    }
    org.okraAx.v3.beans.player.CallbackLoginBean other = (org.okraAx.v3.beans.player.CallbackLoginBean) obj;

    boolean result = true;
    result = result && getOpenId()
        .equals(other.getOpenId());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + OPENID_FIELD_NUMBER;
    hash = (53 * hash) + getOpenId().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.okraAx.v3.beans.player.CallbackLoginBean parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.okraAx.v3.beans.player.CallbackLoginBean prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code org.okraAx.v3.CallbackLoginBean}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:org.okraAx.v3.CallbackLoginBean)
      org.okraAx.v3.beans.player.CallbackLoginBeanOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.okraAx.v3.beans.player.GpcBnPlayer.internal_static_org_okraAx_v3_CallbackLoginBean_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.okraAx.v3.beans.player.GpcBnPlayer.internal_static_org_okraAx_v3_CallbackLoginBean_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.okraAx.v3.beans.player.CallbackLoginBean.class, org.okraAx.v3.beans.player.CallbackLoginBean.Builder.class);
    }

    // Construct using org.okraAx.v3.beans.player.CallbackLoginBean.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      openId_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.okraAx.v3.beans.player.GpcBnPlayer.internal_static_org_okraAx_v3_CallbackLoginBean_descriptor;
    }

    public org.okraAx.v3.beans.player.CallbackLoginBean getDefaultInstanceForType() {
      return org.okraAx.v3.beans.player.CallbackLoginBean.getDefaultInstance();
    }

    public org.okraAx.v3.beans.player.CallbackLoginBean build() {
      org.okraAx.v3.beans.player.CallbackLoginBean result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.okraAx.v3.beans.player.CallbackLoginBean buildPartial() {
      org.okraAx.v3.beans.player.CallbackLoginBean result = new org.okraAx.v3.beans.player.CallbackLoginBean(this);
      result.openId_ = openId_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.okraAx.v3.beans.player.CallbackLoginBean) {
        return mergeFrom((org.okraAx.v3.beans.player.CallbackLoginBean)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.okraAx.v3.beans.player.CallbackLoginBean other) {
      if (other == org.okraAx.v3.beans.player.CallbackLoginBean.getDefaultInstance()) return this;
      // proto3 merge semantics: only overwrite with non-default values.
      if (!other.getOpenId().isEmpty()) {
        openId_ = other.openId_;
        onChanged();
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.okraAx.v3.beans.player.CallbackLoginBean parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.okraAx.v3.beans.player.CallbackLoginBean) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before a failure, per protobuf contract.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object openId_ = "";
    /**
     * <code>optional string openId = 1;</code>
     */
    public java.lang.String getOpenId() {
      java.lang.Object ref = openId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        openId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string openId = 1;</code>
     */
    public com.google.protobuf.ByteString
        getOpenIdBytes() {
      java.lang.Object ref = openId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        openId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string openId = 1;</code>
     */
    public Builder setOpenId(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      openId_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string openId = 1;</code>
     */
    public Builder clearOpenId() {
      
      openId_ = getDefaultInstance().getOpenId();
      onChanged();
      return this;
    }
    /**
     * <code>optional string openId = 1;</code>
     */
    public Builder setOpenIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      openId_ = value;
      onChanged();
      return this;
    }
    // Unknown fields are intentionally dropped by this generator version.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:org.okraAx.v3.CallbackLoginBean)
  }

  // @@protoc_insertion_point(class_scope:org.okraAx.v3.CallbackLoginBean)
  private static final org.okraAx.v3.beans.player.CallbackLoginBean DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.okraAx.v3.beans.player.CallbackLoginBean();
  }

  public static org.okraAx.v3.beans.player.CallbackLoginBean getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CallbackLoginBean>
      PARSER = new com.google.protobuf.AbstractParser<CallbackLoginBean>() {
    public CallbackLoginBean parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new CallbackLoginBean(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<CallbackLoginBean> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CallbackLoginBean> getParserForType() {
    return PARSER;
  }

  public org.okraAx.v3.beans.player.CallbackLoginBean getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
import javax.swing.JOptionPane; public class Menu { private String opcionMenuString; private int opcionMenu; private int turnosJugados = 1; private int turnosParaJugar = 9; private int delay = 25000; private boolean posicionMarcada; private boolean marcaValida; private boolean jugadorGana; Jugador jugador1 = new Jugador(); Jugador jugador2 = new Jugador(); Juego juego = new Juego(); public void getMenu() { opcionMenuString = JOptionPane.showInputDialog(null, "Elija un numero correspondiente a una opcion\n1. Jugar de nuevo\n2. Cambiar el Jugador 1\n3. Cambiar el jugador 2\n4. Salir del juego\n(Debe ingresar un valor entero del 1 al 4)"); try { // Convertir el valor de opcionMenu opcionMenu = Integer.parseInt(opcionMenuString); if(opcionMenu >= 1 && opcionMenu <= 4) { setOpcionMenu(opcionMenu); }else{ JOptionPane.showMessageDialog(null, "Opcion de menu fuera de rango, seleccione de nuevo", "Mensaje de Error", JOptionPane.ERROR_MESSAGE); getMenu(); } }catch(NumberFormatException e){ JOptionPane.showMessageDialog(null, "El valor que ingreso no es un numero, por favor intente de nuevo", "Mensaje de Error", JOptionPane.ERROR_MESSAGE); System.out.println(" Error al convertir a Entero: Valor ingresado no es numero"); // Si se comenta la siguiente linea y se descomenta getMenu() generara problemas, ya que el usuario si hace click en CANCEL, no podra salir nunca a menos que agrege el valor necesario para salirDeSistema. System.exit(1); // Puede descomentarse la siguiente funcion para hacer que si se equivoca vuelve a llamar al menu, pero debido a esto, si el usuario hace click en CANCEL, nunca saldra a menos que ingrese el valor necesario para salirDelSistema. 
//getMenu(); } } private void setOpcionMenu(int opcionMenu) { switch (opcionMenu) { case 1: nuevoJuego(); break; case 2: // Cambiar nombre de objeto y establecer estadisticas en 0 cambiarJugadorUno(); // Retornar al menu para continuar App getMenu(); break; case 3: // Cambiar nombre de objeto y establecer estadisticas en 0 cambiarJugadorDos(); // Retornar al menu para continuar App getMenu(); break; case 4: salirJuego(); break; } } private void nuevoJuego(){ System.out.println("Iniciando juego"); JOptionPane.showMessageDialog(null, "Lea las instrucciones que apareceran impresas en la consola,\ntendra 30 segundos para leer", "Antes de comenzar", JOptionPane.ERROR_MESSAGE); // Instrucciones de juego juego.instruccionesMarcadoDeTablero(); delayTimer(); juego.setTablero(); while(turnosJugados <= turnosParaJugar) { if((turnosJugados % 2) != 0){ // Continua jugador 1 JOptionPane.showMessageDialog(null, "Jugador 1: Su turno, defina las coordenadas para marcar\nPodra ver el tablero de juego en la Consola", "Jugador", JOptionPane.PLAIN_MESSAGE); // tablero antes de marcado System.out.println("Tablero antes de que " + jugador1.getNombreJugador() + " marque."); juego.imprimirTablero(juego.getTablero()); System.out.println(); do{ juego.setFila(jugador1.getNombreJugador()); juego.setColumna(jugador1.getNombreJugador()); boolean marcaValida = juego.verificadorDeMarca(jugador1.getValorDeJugador()); // Mensaje a jugador si la marca ya esta ocupada if(marcaValida == false) { JOptionPane.showMessageDialog(null, "La marca ya existe.\nDebera volver a intentarlo de nuevo.", "Oops!", JOptionPane.ERROR_MESSAGE); turnosJugados = turnosJugados - 1; } } while (marcaValida != false); System.out.println("Tablero despues de que " + jugador1.getNombreJugador() + " marca."); juego.imprimirTablero(juego.getTablero()); System.out.println(); jugadorGana = juego.determinarSiGana(juego.getTablero(),jugador1.getValorDeGaneJugador()); if(jugadorGana == true) { jugador1.setPartidasGanadas(); 
jugador2.setPartidasPerdidas(); juego.mensajeDeGanador(jugador1.getNombreJugador()); System.out.println(); System.out.println("Tablero ganador"); juego.imprimirTablero(juego.getTablero()); System.out.println(); turnosJugados = 1; delayTimer(); getMenu(); } }else{ // Continua jugador 2 JOptionPane.showMessageDialog(null, "Jugador 2: Su turno, defina las coordenadas para marcar\nPodra ver el tablero de juego en la Consola", "Jugador", JOptionPane.PLAIN_MESSAGE); // tablero antes de marcado System.out.println("Tablero antes de que " + jugador2.getNombreJugador() + " marque."); juego.imprimirTablero(juego.getTablero()); System.out.println(); do{ juego.setFila(jugador2.getNombreJugador()); juego.setColumna(jugador2.getNombreJugador()); boolean marcaValida = juego.verificadorDeMarca(jugador2.getValorDeJugador()); // Mensaje a jugador si la marca ya esta ocupada if(marcaValida == false) { JOptionPane.showMessageDialog(null, "La marca ya existe.\nDebera volver a intentarlo de nuevo.", "Oops!", JOptionPane.ERROR_MESSAGE); turnosJugados = turnosJugados - 1; } } while (marcaValida != false); System.out.println("Tablero despues de que " + jugador2.getNombreJugador() + " marca."); juego.imprimirTablero(juego.getTablero()); System.out.println(); jugadorGana = juego.determinarSiGana(juego.getTablero(),jugador2.getValorDeGaneJugador()); if(jugadorGana == true) { jugador2.setPartidasGanadas(); jugador1.setPartidasPerdidas(); juego.mensajeDeGanador(jugador2.getNombreJugador()); System.out.println(); System.out.println("Tablero ganador"); juego.imprimirTablero(juego.getTablero()); System.out.println(); turnosJugados = 1; delayTimer(); getMenu(); } } turnosJugados++; } // Si se llega a este punto: aumentar Empates a jugador1 y jugador2 // testear quitando o manteniendo la siguiente linea turnosJugados = 1; jugador1.setPartidasEmpatadas(); jugador2.setPartidasEmpatadas(); } private void cambiarJugadorUno() { // Nombrede Jugador 1 cuando comienza el sistema System.out.println("Nombre de 
Jugador 1 al iniciar sistema: " + jugador1.getNombreJugador()); // Obtener el nuevo nombre del objeto String nombreDeJugador1 = JOptionPane.showInputDialog(null, "Nombre nuevo de jugador 1:"); // Evalua si el nombre esta vacio if((nombreDeJugador1 == null) || (nombreDeJugador1.isEmpty())) { JOptionPane.showMessageDialog(null, "No ingreso un nombre valido o no ingreso un nombre para Jugador 1", "Mensaje de Error", JOptionPane.ERROR_MESSAGE); cambiarJugadorUno(); }else { // Objeto se reseteara valores numericos en 0 y nombre a el ingresado en la line a anterior jugador1 = new Jugador(nombreDeJugador1, 0, 0, 0); // Historial para consola, imprime el jugador almacenado en el objeto System.out.println("Nombre nuevo de jugador 1 en DB es: " + jugador1.getNombreJugador()); } } private void cambiarJugadorDos() { // Nombrede Jugador 2 cuando comienza el sistema System.out.println("Nombre de Jugador 2 al iniciar sistema: " + jugador2.getNombreJugador()); // Obtener el nuevo nombre del objeto String nombreDeJugador2 = JOptionPane.showInputDialog(null, "Nombre nuevo de jugador 2:"); // Evalua si el nombre esta vacio if((nombreDeJugador2 == null) || (nombreDeJugador2.isEmpty())) { JOptionPane.showMessageDialog(null, "No ingreso un nombre valido o no ingreso un nombre para Jugador 2", "Mensaje de Error", JOptionPane.ERROR_MESSAGE); cambiarJugadorDos(); }else { // Objeto se reseteara valores numericos en 0 y nombre a el ingresado en la line a anterior jugador2 = new Jugador(nombreDeJugador2, 0, 0, 0); // Historial para consola, imprime el jugador almacenado en el objeto System.out.println("Nombre nuevo de jugador 2 en DB es: " + jugador2.getNombreJugador()); } } private void salirJuego() { System.exit(1); } public void mensajeBienvenida() { JOptionPane.showMessageDialog(null, "Bienvenidos al juego de Gato", "Lab: 10 - Juego Gato", JOptionPane.INFORMATION_MESSAGE); } private void delayTimer() { try{ Thread.sleep(this.delay); }catch(InterruptedException e){ System.out.println(e); } 
} public void establecerJugadores() { // Preparacion de jugador 1 // cambiarJugadorUno(); jugador1 = new Jugador("Jugador 1"); // Establece el valor del jugador para marcar jugador1.setValorDeJugador(1); // Establece el valor para ganar del jugador jugador1.setValorDeGaneJugador(3); // Preparacion de jugador 2 // cambiarJugadorDos(); jugador2 = new Jugador("Jugador 2"); // Establece el valor del jugador para marcar jugador2.setValorDeJugador(5); // Establece el valor para ganar del jugador jugador2.setValorDeGaneJugador(15); } }
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java.intellij;

import com.facebook.buck.android.AndroidBinaryDescription;
import com.facebook.buck.android.AndroidLibraryDescription;
import com.facebook.buck.android.AndroidResourceDescription;
import com.facebook.buck.android.RobolectricTestDescription;
import com.facebook.buck.cxx.CxxLibraryDescription;
import com.facebook.buck.jvm.java.JavaLibraryDescription;
import com.facebook.buck.jvm.java.JavaTestDescription;
import com.facebook.buck.jvm.java.JvmLibraryArg;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.HasBuildTarget;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.TargetNode;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * Builds {@link IjModule}s out of {@link TargetNode}s.
 *
 * <p>Each supported target type is handled by one {@link IjModuleRule}
 * registered in {@link #moduleRuleIndex}; rules accumulate their results into
 * a shared {@link ModuleBuildContext} which is then turned into an
 * {@link IjModule} by {@link #createModule(Path, ImmutableSet)}.
 */
public class IjModuleFactory {

  /**
   * These target types are mapped onto .iml module files.
   */
  private static final ImmutableSet<BuildRuleType> SUPPORTED_MODULE_TYPES = ImmutableSet.of(
      AndroidBinaryDescription.TYPE,
      AndroidLibraryDescription.TYPE,
      AndroidResourceDescription.TYPE,
      CxxLibraryDescription.TYPE,
      JavaLibraryDescription.TYPE,
      JavaTestDescription.TYPE,
      RobolectricTestDescription.TYPE);

  // True for target nodes this factory knows how to turn into a module.
  public static final Predicate<TargetNode<?>> SUPPORTED_MODULE_TYPES_PREDICATE =
      new Predicate<TargetNode<?>>() {
        @Override
        public boolean apply(TargetNode<?> input) {
          return SUPPORTED_MODULE_TYPES.contains(input.getType());
        }
      };

  /**
   * Provides the {@link IjModuleFactory} with {@link Path}s to various elements of the project.
   */
  public interface IjModuleFactoryResolver {
    /**
     * @param targetNode node to generate the path to
     * @return the project-relative path to a directory structure under which the R.class file can
     *     be found (the structure will be the same as the package path of the R class). A path
     *     should be returned only if the given TargetNode requires the R.class to compile.
     */
    Optional<Path> getDummyRDotJavaPath(TargetNode<?> targetNode);

    /**
     * @param targetNode node describing the Android binary to get the manifest of.
     * @return path on disk to the AndroidManifest.
     */
    Path getAndroidManifestPath(TargetNode<AndroidBinaryDescription.Arg> targetNode);

    /**
     * @param targetNode node describing the Android binary to get the Proguard config of.
     * @return path on disk to the proguard config.
     */
    Optional<Path> getProguardConfigPath(TargetNode<AndroidBinaryDescription.Arg> targetNode);

    /**
     * @param targetNode node describing the Android resources to get the path of.
     * @return path on disk to the resources folder.
     */
    Optional<Path> getAndroidResourcePath(TargetNode<AndroidResourceDescription.Arg> targetNode);

    /**
     * @param targetNode node describing the Android assets to get the path of.
     * @return path on disk to the assets folder.
     */
    Optional<Path> getAssetsPath(TargetNode<AndroidResourceDescription.Arg> targetNode);

    /**
     * @param targetNode node which may use annotation processors.
     * @return path to the annotation processor output if any annotation processors are configured
     *     for the given node.
     */
    Optional<Path> getAnnotationOutputPath(TargetNode<? extends JvmLibraryArg> targetNode);
  }

  /**
   * Holds all of the mutable state required during {@link IjModule} creation.
   */
  private static class ModuleBuildContext {
    private final ImmutableSet<BuildTarget> circularDependencyInducingTargets;

    private Optional<IjModuleAndroidFacet.Builder> androidFacetBuilder;
    private ImmutableSet.Builder<Path> extraClassPathDependenciesBuilder;
    private ImmutableSet.Builder<IjFolder> generatedSourceCodeFoldersBuilder;
    // Keyed by folder path so folders contributed by multiple targets merge.
    private Map<Path, IjFolder> sourceFoldersMergeMap;
    // See comment in getDependencies for these two member variables.
    private Map<BuildTarget, IjModuleGraph.DependencyType> dependencyTypeMap;
    private Multimap<Path, BuildTarget> dependencyOriginMap;

    public ModuleBuildContext(ImmutableSet<BuildTarget> circularDependencyInducingTargets) {
      this.circularDependencyInducingTargets = circularDependencyInducingTargets;
      this.androidFacetBuilder = Optional.absent();
      this.extraClassPathDependenciesBuilder = new ImmutableSet.Builder<>();
      this.generatedSourceCodeFoldersBuilder = ImmutableSet.builder();
      this.sourceFoldersMergeMap = new HashMap<>();
      this.dependencyTypeMap = new HashMap<>();
      this.dependencyOriginMap = HashMultimap.create();
    }

    // Lazily creates the Android facet builder the first time it is needed.
    public void ensureAndroidFacetBuilder() {
      if (!androidFacetBuilder.isPresent()) {
        androidFacetBuilder = Optional.of(IjModuleAndroidFacet.builder());
      }
    }

    public IjModuleAndroidFacet.Builder getOrCreateAndroidFacetBuilder() {
      ensureAndroidFacetBuilder();
      return androidFacetBuilder.get();
    }

    // Absent when no rule requested an Android facet for this module.
    public Optional<IjModuleAndroidFacet> getAndroidFacet() {
      return androidFacetBuilder.transform(
          new Function<IjModuleAndroidFacet.Builder, IjModuleAndroidFacet>() {
            @Override
            public IjModuleAndroidFacet apply(IjModuleAndroidFacet.Builder input) {
              return input.build();
            }
          });
    }

    public ImmutableSet<IjFolder> getSourceFolders() {
      return ImmutableSet.copyOf(sourceFoldersMergeMap.values());
    }

    public void addExtraClassPathDependency(Path path) {
      extraClassPathDependenciesBuilder.add(path);
    }

    public ImmutableSet<Path> getExtraClassPathDependencies() {
      return extraClassPathDependenciesBuilder.build();
    }

    public void addGeneratedSourceCodeFolder(IjFolder generatedFolder) {
      generatedSourceCodeFoldersBuilder.add(generatedFolder);
    }

    public ImmutableSet<IjFolder> getGeneratedSourceCodeFolders() {
      return generatedSourceCodeFoldersBuilder.build();
    }

    /**
     * Adds a source folder to the context. If a folder with the same path has already been added
     * the types of the two folders will be merged.
     *
     * @param folder folder to add/merge.
     */
    public void addSourceFolder(IjFolder folder) {
      Path path = folder.getPath();
      IjFolder otherFolder = sourceFoldersMergeMap.get(path);
      if (otherFolder != null) {
        folder = folder.merge(otherFolder);
      }
      sourceFoldersMergeMap.put(path, folder);
    }

    // Convenience overload: dependencies not associated with any source path.
    public void addDeps(
        ImmutableSet<BuildTarget> buildTargets,
        IjModuleGraph.DependencyType dependencyType) {
      addDeps(ImmutableSet.<Path>of(), buildTargets, dependencyType);
    }

    public void addCompileShadowDep(BuildTarget buildTarget) {
      IjModuleGraph.DependencyType.putWithMerge(
          dependencyTypeMap,
          buildTarget,
          IjModuleGraph.DependencyType.COMPILED_SHADOW);
    }

    /**
     * Record a dependency on a {@link BuildTarget}. The dependency's type will be merged if
     * multiple {@link TargetNode}s refer to it or if multiple TargetNodes include sources from
     * the same directory.
     *
     * @param sourcePaths the {@link Path}s to sources which need this dependency to build.
     *     Can be empty.
     * @param buildTargets the {@link BuildTarget}s to depend on
     * @param dependencyType what is the dependency needed for.
     */
    public void addDeps(
        ImmutableSet<Path> sourcePaths,
        ImmutableSet<BuildTarget> buildTargets,
        IjModuleGraph.DependencyType dependencyType) {
      for (BuildTarget buildTarget : buildTargets) {
        // Targets known to induce dependency cycles are dropped entirely.
        if (circularDependencyInducingTargets.contains(buildTarget)) {
          continue;
        }
        if (sourcePaths.isEmpty()) {
          IjModuleGraph.DependencyType.putWithMerge(dependencyTypeMap, buildTarget, dependencyType);
        } else {
          // Defer the type decision: getDependencies derives it from the
          // (possibly merged) type of the source folder at each path.
          for (Path sourcePath : sourcePaths) {
            dependencyOriginMap.put(sourcePath, buildTarget);
          }
        }
      }
    }

    public ImmutableMap<BuildTarget, IjModuleGraph.DependencyType> getDependencies() {
      // Some targets may introduce dependencies without contributing to the IjFolder set. These
      // are recorded in the dependencyTypeMap.
      // Dependencies associated with source paths inherit the type from the folder. This is because
      // IntelliJ only operates on folders and so it is impossible to distinguish between test and
      // production code if it's in the same folder. That in turn means test-only dependencies need
      // to be "promoted" to production dependencies in the above scenario to keep code compiling.
      // It is also possible that a target is included in both maps, in which case the type gets
      // merged anyway.
      // Merging types does not back-propagate: if TargetA depends on TargetB and the type of
      // TargetB has been changed that does not mean the dependency type of TargetA is changed too.
      Map<BuildTarget, IjModuleGraph.DependencyType> result = new HashMap<>(dependencyTypeMap);
      for (Path path : dependencyOriginMap.keySet()) {
        IjModuleGraph.DependencyType dependencyType =
            Preconditions.checkNotNull(sourceFoldersMergeMap.get(path)) instanceof TestFolder ?
                IjModuleGraph.DependencyType.TEST :
                IjModuleGraph.DependencyType.PROD;
        for (BuildTarget buildTarget : dependencyOriginMap.get(path)) {
          IjModuleGraph.DependencyType.putWithMerge(result, buildTarget, dependencyType);
        }
      }
      return ImmutableMap.copyOf(result);
    }
  }

  /**
   * Rule describing which aspects of the supplied {@link TargetNode} to transfer to the
   * {@link IjModule} being constructed.
   *
   * @param <T> TargetNode type.
   */
  private interface IjModuleRule<T> {
    BuildRuleType getType();
    void apply(TargetNode<T> targetNode, ModuleBuildContext context);
  }

  // Maps each supported BuildRuleType to the rule that handles it.
  private final Map<BuildRuleType, IjModuleRule<?>> moduleRuleIndex = new HashMap<>();
  private final IjModuleFactoryResolver moduleFactoryResolver;

  /**
   * @param moduleFactoryResolver see {@link IjModuleFactoryResolver}.
   */
  public IjModuleFactory(IjModuleFactoryResolver moduleFactoryResolver) {
    addToIndex(new AndroidBinaryModuleRule());
    addToIndex(new AndroidLibraryModuleRule());
    addToIndex(new AndroidResourceModuleRule());
    addToIndex(new CxxLibraryModuleRule());
    addToIndex(new JavaLibraryModuleRule());
    addToIndex(new JavaTestModuleRule());
    addToIndex(new RobolectricTestModuleRule());

    this.moduleFactoryResolver = moduleFactoryResolver;

    // Guarantees every supported type has exactly one registered rule.
    Preconditions.checkState(
        moduleRuleIndex.keySet().equals(SUPPORTED_MODULE_TYPES));
  }

  private void addToIndex(IjModuleRule<?> rule) {
    Preconditions.checkArgument(!moduleRuleIndex.containsKey(rule.getType()));
    Preconditions.checkArgument(SUPPORTED_MODULE_TYPES.contains(rule.getType()));
    moduleRuleIndex.put(rule.getType(), rule);
  }

  /**
   * Create an {@link IjModule} from the supplied parameters.
   *
   * @param moduleBasePath the top-most directory the module is responsible for.
   * @param targetNodes set of nodes the module is to be created from.
   * @return nice shiny new module.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  public IjModule createModule(
      Path moduleBasePath,
      ImmutableSet<TargetNode<?>> targetNodes) {
    Preconditions.checkArgument(!targetNodes.isEmpty());

    ImmutableSet<BuildTarget> moduleBuildTargets = FluentIterable.from(targetNodes)
        .transform(HasBuildTarget.TO_TARGET)
        .toSet();

    // Targets inside the module cannot also be its external dependencies.
    ModuleBuildContext context = new ModuleBuildContext(moduleBuildTargets);

    for (TargetNode<?> targetNode : targetNodes) {
      IjModuleRule<?> rule = Preconditions.checkNotNull(moduleRuleIndex.get(targetNode.getType()));
      rule.apply((TargetNode) targetNode, context);
    }

    return IjModule.builder()
        .setModuleBasePath(moduleBasePath)
        .setTargets(targetNodes)
        .addAllFolders(context.getSourceFolders())
        .putAllDependencies(context.getDependencies())
        .setAndroidFacet(context.getAndroidFacet())
        .addAllExtraClassPathDependencies(context.getExtraClassPathDependencies())
        .addAllGeneratedSourceCodeFolders(context.getGeneratedSourceCodeFolders())
        .build();
  }

  /**
   * Calculate the set of directories containing inputs to the target.
   *
   * @param paths inputs to a given target.
   * @return index of path to set of inputs in that path
   */
  private static ImmutableMultimap<Path, Path> getSourceFoldersToInputsIndex(
      ImmutableSet<Path> paths) {
    return FluentIterable.from(paths)
        .index(
            new Function<Path, Path>() {
              @Override
              public Path apply(Path input) {
                Path parent = input.getParent();
                // Inputs at the repository root index under the empty path.
                if (parent == null) {
                  return Paths.get("");
                }
                return parent;
              }
            });
  }

  /**
   * @param paths paths to check
   * @return whether any of the paths pointed to something not in the source tree.
   */
  private static boolean containsNonSourcePath(Optional<? extends Iterable<SourcePath>> paths) {
    if (!paths.isPresent()) {
      return false;
    }
    return FluentIterable.from(paths.get())
        .anyMatch(
            new Predicate<SourcePath>() {
              @Override
              public boolean apply(SourcePath input) {
                return !(input instanceof PathSourcePath);
              }
            });
  }

  /**
   * Add the set of input paths to the {@link IjModule.Builder} as source folders.
   *
   * @param foldersToInputsIndex mapping of source folders to their inputs.
   * @param wantsPackagePrefix whether folders should be annotated with a package prefix. This
   *     only makes sense when the source folder is Java source code.
   * @param context the module to add the folders to.
   */
  private static void addSourceFolders(
      IjFolder.IJFolderFactory factory,
      ImmutableMultimap<Path, Path> foldersToInputsIndex,
      boolean wantsPackagePrefix,
      ModuleBuildContext context) {
    for (Map.Entry<Path, Collection<Path>> entry : foldersToInputsIndex.asMap().entrySet()) {
      context.addSourceFolder(
          factory.create(
              entry.getKey(),
              wantsPackagePrefix,
              FluentIterable.from(entry.getValue()).toSortedSet(Ordering.natural())
          )
      );
    }
  }

  // Shared implementation behind addDepsAndSources/addDepsAndTestSources:
  // registers source folders, per-folder dependencies and (for JVM targets)
  // the annotation processor output folder.
  private void addDepsAndFolder(
      IjFolder.IJFolderFactory folderFactory,
      IjModuleGraph.DependencyType dependencyType,
      TargetNode<?> targetNode,
      boolean wantsPackagePrefix,
      ModuleBuildContext context,
      ImmutableSet<Path> inputPaths
  ) {
    ImmutableMultimap<Path, Path> foldersToInputsIndex = getSourceFoldersToInputsIndex(inputPaths);
    addSourceFolders(folderFactory, foldersToInputsIndex, wantsPackagePrefix, context);
    addDeps(foldersToInputsIndex, targetNode, dependencyType, context);

    if (targetNode.getConstructorArg() instanceof JvmLibraryArg) {
      addAnnotationOutputIfNeeded(folderFactory, targetNode, context);
    }
  }

  // Overload defaulting inputPaths to the target node's own inputs.
  private void addDepsAndFolder(
      IjFolder.IJFolderFactory folderFactory,
      IjModuleGraph.DependencyType dependencyType,
      TargetNode<?> targetNode,
      boolean wantsPackagePrefix,
      ModuleBuildContext context
  ) {
    addDepsAndFolder(
        folderFactory,
        dependencyType,
        targetNode,
        wantsPackagePrefix,
        context,
        targetNode.getInputs());
  }

  // Registers the node's inputs as production source folders plus PROD deps.
  private void addDepsAndSources(
      TargetNode<?> targetNode,
      boolean wantsPackagePrefix,
      ModuleBuildContext context) {
    addDepsAndFolder(
        SourceFolder.FACTORY,
        IjModuleGraph.DependencyType.PROD,
        targetNode,
        wantsPackagePrefix,
        context);
  }

  // Registers the node's inputs as test source folders plus TEST deps.
  private void addDepsAndTestSources(
      TargetNode<?> targetNode,
      boolean wantsPackagePrefix,
      ModuleBuildContext context) {
    addDepsAndFolder(
        TestFolder.FACTORY,
        IjModuleGraph.DependencyType.TEST,
        targetNode,
        wantsPackagePrefix,
        context);
  }

  private static void addDeps(
      ImmutableMultimap<Path, Path> foldersToInputsIndex,
      TargetNode<?> targetNode,
      IjModuleGraph.DependencyType dependencyType,
      ModuleBuildContext context) {
    context.addDeps(
        foldersToInputsIndex.keySet(),
        targetNode.getDeps(),
        dependencyType);
  }

  private static <T extends JavaLibraryDescription.Arg> void addCompiledShadowIfNeeded(
      TargetNode<T> targetNode,
      ModuleBuildContext context) {
    T arg = targetNode.getConstructorArg();
    // TODO(marcinkosiba): investigate supporting annotation processors without resorting to this.
    boolean hasAnnotationProcessors = !arg.annotationProcessors.get().isEmpty();
    if (containsNonSourcePath(arg.srcs) || hasAnnotationProcessors) {
      context.addCompileShadowDep(targetNode.getBuildTarget());
    }
  }

  @SuppressWarnings("unchecked")
  private void addAnnotationOutputIfNeeded(
      IjFolder.IJFolderFactory folderFactory,
      TargetNode<?> targetNode,
      ModuleBuildContext context) {
    TargetNode<? extends JvmLibraryArg> jvmLibraryTargetNode =
        (TargetNode<? extends JvmLibraryArg>) targetNode;

    Optional<Path> annotationOutput =
        moduleFactoryResolver.getAnnotationOutputPath(jvmLibraryTargetNode);
    if (!annotationOutput.isPresent()) {
      return;
    }

    Path annotationOutputPath = annotationOutput.get();
    context.addGeneratedSourceCodeFolder(
        folderFactory.create(
            annotationOutputPath,
            false,
            ImmutableSortedSet.<Path>of(annotationOutputPath))
    );
  }

  private class AndroidBinaryModuleRule implements IjModuleRule<AndroidBinaryDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return AndroidBinaryDescription.TYPE;
    }

    @Override
    public void apply(TargetNode<AndroidBinaryDescription.Arg> target, ModuleBuildContext context) {
      context.addDeps(target.getDeps(), IjModuleGraph.DependencyType.PROD);

      IjModuleAndroidFacet.Builder androidFacetBuilder = context.getOrCreateAndroidFacetBuilder();
      androidFacetBuilder
          .setManifestPath(moduleFactoryResolver.getAndroidManifestPath(target))
          .setProguardConfigPath(moduleFactoryResolver.getProguardConfigPath(target))
          .setAndroidLibrary(false);
    }
  }

  private class AndroidLibraryModuleRule implements IjModuleRule<AndroidLibraryDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return AndroidLibraryDescription.TYPE;
    }

    @Override
    public void apply(TargetNode<AndroidLibraryDescription.Arg> target, ModuleBuildContext context) {
      addDepsAndSources(
          target,
          true /* wantsPackagePrefix */,
          context);
      addCompiledShadowIfNeeded(target, context);
      Optional<Path> dummyRDotJavaClassPath = moduleFactoryResolver.getDummyRDotJavaPath(target);
      if (dummyRDotJavaClassPath.isPresent()) {
        context.addExtraClassPathDependency(dummyRDotJavaClassPath.get());
      }
      context.getOrCreateAndroidFacetBuilder().setAndroidLibrary(true);
    }
  }

  private class AndroidResourceModuleRule implements IjModuleRule<AndroidResourceDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return AndroidResourceDescription.TYPE;
    }

    @Override
    public void apply(
        TargetNode<AndroidResourceDescription.Arg> target,
        ModuleBuildContext context) {

      IjModuleAndroidFacet.Builder androidFacetBuilder = context.getOrCreateAndroidFacetBuilder();
      androidFacetBuilder.setAndroidLibrary(true);

      Optional<Path> assets = moduleFactoryResolver.getAssetsPath(target);
      if (assets.isPresent()) {
        androidFacetBuilder.addAssetPaths(assets.get());
      }

      Optional<Path> resources = moduleFactoryResolver.getAndroidResourcePath(target);
      ImmutableSet<Path> resourceFolders;
      if (resources.isPresent()) {
        resourceFolders = ImmutableSet.of(resources.get());

        androidFacetBuilder.addAllResourcePaths(resourceFolders);

        for (Path resourceFolder : resourceFolders) {
          context.addSourceFolder(
              new AndroidResourceFolder(resourceFolder)
          );
        }
      } else {
        resourceFolders = ImmutableSet.<Path>of();
      }

      androidFacetBuilder.setPackageName(target.getConstructorArg().rDotJavaPackage);

      Optional<Path> dummyRDotJavaClassPath = moduleFactoryResolver.getDummyRDotJavaPath(target);
      if (dummyRDotJavaClassPath.isPresent()) {
        context.addExtraClassPathDependency(dummyRDotJavaClassPath.get());
      }

      // NOTE(review): androidLibrary was already set to true at the top of
      // this method; this second call is redundant but harmless.
      context.getOrCreateAndroidFacetBuilder().setAndroidLibrary(true);

      context.addDeps(resourceFolders, target.getDeps(), IjModuleGraph.DependencyType.PROD);
    }
  }

  private class CxxLibraryModuleRule implements IjModuleRule<CxxLibraryDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return CxxLibraryDescription.TYPE;
    }

    @Override
    public void apply(TargetNode<CxxLibraryDescription.Arg> target, ModuleBuildContext context) {
      addSourceFolders(
          SourceFolder.FACTORY,
          getSourceFoldersToInputsIndex(target.getInputs()),
          false /* wantsPackagePrefix */,
          context);
    }
  }

  private class JavaLibraryModuleRule implements IjModuleRule<JavaLibraryDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return JavaLibraryDescription.TYPE;
    }

    @Override
    public void apply(TargetNode<JavaLibraryDescription.Arg> target, ModuleBuildContext context) {
      addDepsAndSources(
          target,
          true /* wantsPackagePrefix */,
          context);
      addCompiledShadowIfNeeded(target, context);
    }
  }

  private class JavaTestModuleRule implements IjModuleRule<JavaTestDescription.Arg> {

    @Override
    public BuildRuleType getType() {
      return JavaTestDescription.TYPE;
    }

    @Override
    public void apply(TargetNode<JavaTestDescription.Arg> target, ModuleBuildContext context) {
      addDepsAndTestSources(
          target,
          true /* wantsPackagePrefix */,
          context);
      addCompiledShadowIfNeeded(target, context);
    }
  }

  // Robolectric tests behave exactly like Java tests; only the type differs.
  private class RobolectricTestModuleRule extends JavaTestModuleRule {

    @Override
    public BuildRuleType getType() {
      return RobolectricTestDescription.TYPE;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.shiro.web.servlet;

import org.apache.shiro.session.InvalidSessionException;
import org.apache.shiro.session.Session;
import org.apache.shiro.web.session.HttpServletSession;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionBindingEvent;
import javax.servlet.http.HttpSessionBindingListener;
import java.util.*;

/**
 * Wrapper class that uses a Shiro {@link Session Session} under the hood for all session operations instead of the
 * Servlet Container's session mechanism.  This is required in heterogeneous client environments where the Session
 * is used on both the business tier as well as in multiple client technologies (web, swing, flash, etc.) since
 * Servlet container sessions alone cannot support this feature.
 *
 * @since 0.2
 */
public class ShiroHttpSession implements HttpSession {

    //TODO - complete JavaDoc

    public static final String DEFAULT_SESSION_ID_NAME = "JSESSIONID";

    // Shared empty enumeration returned by the deprecated HttpSessionContext.
    // Raw type is dictated by the pre-generics servlet API.
    private static final Enumeration EMPTY_ENUMERATION = new Enumeration() {
        public boolean hasMoreElements() {
            return false;
        }

        public Object nextElement() {
            return null;
        }
    };

    // Deprecated servlet API member we must still provide; deliberately inert.
    @SuppressWarnings({"deprecation"})
    private static final javax.servlet.http.HttpSessionContext HTTP_SESSION_CONTEXT =
            new javax.servlet.http.HttpSessionContext() {
                public HttpSession getSession(String s) {
                    return null;
                }

                public Enumeration getIds() {
                    return EMPTY_ENUMERATION;
                }
            };

    protected ServletContext servletContext = null;
    protected HttpServletRequest currentRequest = null;
    protected Session session = null; //'real' Shiro Session

    /**
     * @param session        the backing Shiro session; must not itself be an {@link HttpServletSession}
     *                       (that wrapper delegates back to an HttpSession, so allowing it here
     *                       would create a circular delegation loop).
     * @param currentRequest the request this session is attached to (used by {@link #isNew()}).
     * @param servletContext the servlet context returned from {@link #getServletContext()}.
     */
    public ShiroHttpSession(Session session, HttpServletRequest currentRequest, ServletContext servletContext) {
        if (session instanceof HttpServletSession) {
            String msg = "Session constructor argument cannot be an instance of HttpServletSession.  This is enforced to " +
                    "prevent circular dependencies and infinite loops.";
            throw new IllegalArgumentException(msg);
        }
        this.session = session;
        this.currentRequest = currentRequest;
        this.servletContext = servletContext;
    }

    /** Returns the underlying 'real' Shiro session. */
    public Session getSession() {
        return this.session;
    }

    public long getCreationTime() {
        try {
            return getSession().getStartTimestamp().getTime();
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    public String getId() {
        return getSession().getId().toString();
    }

    public long getLastAccessedTime() {
        return getSession().getLastAccessTime().getTime();
    }

    public ServletContext getServletContext() {
        return this.servletContext;
    }

    public void setMaxInactiveInterval(int i) {
        try {
            // Servlet API speaks seconds; Shiro timeouts are milliseconds.
            getSession().setTimeout(i * 1000L);
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
    }

    public int getMaxInactiveInterval() {
        try {
            // Milliseconds -> seconds; plain cast instead of deprecated Long boxing.
            return (int) (getSession().getTimeout() / 1000);
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
    }

    @SuppressWarnings({"deprecation"})
    public javax.servlet.http.HttpSessionContext getSessionContext() {
        return HTTP_SESSION_CONTEXT;
    }

    public Object getAttribute(String s) {
        try {
            return getSession().getAttribute(s);
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
    }

    public Object getValue(String s) {
        return getAttribute(s);
    }

    /**
     * Returns the string form of every attribute key in the underlying Shiro
     * session, or an empty set if there are none.
     */
    protected Set<String> getKeyNames() {
        Collection<Object> keySet;
        try {
            keySet = getSession().getAttributeKeys();
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
        Set<String> keyNames;
        if (keySet != null && !keySet.isEmpty()) {
            keyNames = new HashSet<String>(keySet.size());
            for (Object o : keySet) {
                keyNames.add(o.toString());
            }
        } else {
            // Type-safe empty set instead of the raw Collections.EMPTY_SET constant.
            keyNames = Collections.emptySet();
        }
        return keyNames;
    }

    public Enumeration getAttributeNames() {
        Set<String> keyNames = getKeyNames();
        final Iterator iterator = keyNames.iterator();
        return new Enumeration() {
            public boolean hasMoreElements() {
                return iterator.hasNext();
            }

            public Object nextElement() {
                return iterator.next();
            }
        };
    }

    public String[] getValueNames() {
        Set<String> keyNames = getKeyNames();
        String[] array = new String[keyNames.size()];
        if (!keyNames.isEmpty()) {
            array = keyNames.toArray(array);
        }
        return array;
    }

    // Fires HttpSessionBindingListener.valueBound if the attribute listens.
    protected void afterBound(String s, Object o) {
        if (o instanceof HttpSessionBindingListener) {
            HttpSessionBindingListener listener = (HttpSessionBindingListener) o;
            HttpSessionBindingEvent event = new HttpSessionBindingEvent(this, s, o);
            listener.valueBound(event);
        }
    }

    // Fires HttpSessionBindingListener.valueUnbound if the attribute listens.
    protected void afterUnbound(String s, Object o) {
        if (o instanceof HttpSessionBindingListener) {
            HttpSessionBindingListener listener = (HttpSessionBindingListener) o;
            HttpSessionBindingEvent event = new HttpSessionBindingEvent(this, s, o);
            listener.valueUnbound(event);
        }
    }

    public void setAttribute(String s, Object o) {
        try {
            getSession().setAttribute(s, o);
            afterBound(s, o);
        } catch (InvalidSessionException e) {
            //noinspection finally
            try {
                // The bind failed: notify the listener it is not bound after all.
                afterUnbound(s, o);
            } finally {
                //noinspection ThrowFromFinallyBlock
                throw new IllegalStateException(e);
            }
        }
    }

    public void putValue(String s, Object o) {
        setAttribute(s, o);
    }

    public void removeAttribute(String s) {
        try {
            Object attribute = getSession().removeAttribute(s);
            afterUnbound(s, attribute);
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
    }

    public void removeValue(String s) {
        removeAttribute(s);
    }

    public void invalidate() {
        try {
            getSession().stop();
        } catch (InvalidSessionException e) {
            throw new IllegalStateException(e);
        }
    }

    public boolean isNew() {
        Boolean value = (Boolean) currentRequest.getAttribute(ShiroHttpServletRequest.REFERENCED_SESSION_IS_NEW);
        // Null-safe comparison: absent attribute means the session is not new.
        return Boolean.TRUE.equals(value);
    }
}
package silverscript.parser;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import silverscript.evaluator.functions.Function;
import silverscript.evaluator.functions.Print;
import silverscript.parser.lexer.Lexer;
import silverscript.tokens.Token;
import silverscript.tokens.TokenType;

/**
 * Recursive-descent parser for silverscript token streams.
 *
 * <p>Holds two static tables: {@code identifierMap} (identifier name -> current
 * value) and {@code functionMap} (function id -> registered {@link Function}).
 * Both are mutated as a side effect of parsing, so this class is stateful and
 * not thread-safe.
 */
public class Parser
{
    // Identifier name -> last assigned value. Shared across all parse calls.
    private static Map<String, Object> identifierMap = new HashMap<String, Object>();
    // Function id -> Function implementation, populated by registerFunctions().
    private static Map<String, Function> functionMap = new HashMap<String, Function>();

    /**
     * Registers the built-in functions. Currently only {@link Print} is
     * installed, keyed by its own function id.
     */
    public static void registerFunctions()
    {
        Function print = new Print();
        functionMap.put(print.getFunctionID(), print);
    }

    /**
     * Debug helper: dumps every identifier/value pair to stdout.
     */
    public static void mmap()
    {
        Iterator<Entry<String, Object>> i = identifierMap.entrySet().iterator();

        while(i.hasNext())
        {
            Map.Entry<String, Object> pair = (Map.Entry<String, Object>)i.next();
            System.out.println("MEM: [" + pair.getKey() + ", " + pair.getValue() + "]");
        }
    }

    /**
     * Stores an identifier's value in the shared identifier table.
     * A {@code null} identifier is silently ignored.
     */
    public static void addIdentifier(Identifier identifier)
    {
        if(identifier != null)
            identifierMap.put(identifier.getIdentifier(), identifier.getValue());
    }

    /**
     * Classifies the token list as equation, expression, or function, and
     * validates equations/expressions via {@code SemanticAnalyser}.
     *
     * @return {@code true} when the list is a valid equation, a valid
     *         expression, or a function call; {@code false} otherwise.
     * @throws SemanticException propagated from the semantic analyser.
     */
    public static boolean semanticAnalysis(List<Token> tokenList) throws SemanticException
    {
        Token[] tokens = tokenList.toArray(new Token[tokenList.size()]);

        // Check if is an expression.
        // Check if it is a valid expression.
        // Check if is an equation.
        // Check if it is a valid equation.
        if(Lexer.isEquation(tokenList))
            if(SemanticAnalyser.isValidEquation(tokens))
                return true; /* True if is a valid equation. */
            else
                return false; /* False if is an invalid equation. */
        else if(Lexer.isExpression(tokenList))
            if(SemanticAnalyser.isValidExpression(tokens, 0, null))
                return true; /* True if is a valid expression. */
            else
                return false; /* False if is an invalid expression. */
        else if(Lexer.isFunction(tokenList))
            return true;
        else
            return false; /* False if does not fit criteria. */
    }

    /*
     * Parsing:
     *
     * is an equation:
     * get the identifiers pointer.
     * make it root of parse tree.
     * from root, on the left all numbers.
     * from root, on the right all operators.
     * return to evaluate.
     */
    /**
     * Parses the token array starting at {@code offset} into a
     * {@link ParsedObject}, recursing to consume the remainder of the array.
     *
     * @param tokens    the full token stream.
     * @param offset    index of the token to parse next; mutated locally and
     *                  passed pre-incremented into the recursive calls.
     * @param lastToken the previously consumed token, or {@code null} at the
     *                  start of a statement.
     * @throws ParseException when the token at {@code offset} does not fit any
     *                        recognised shape for the current context.
     */
    public static ParsedObject parseTokenList(Token[] tokens, int offset, Token lastToken) throws ParseException
    {
        ParsedObject parsedObject = new ParsedObject();

        // Function calls are delegated wholesale; note this does NOT return,
        // so the identifier/keyword handling below still runs afterwards.
        if(tokens[offset].getType().equals(TokenType.FUNCTION))
        {
            parsedObject.combineParsedObjects(parseFunction(tokens));
        }

        if(lastToken == null)
        {
            // Start of statement: "ident = value" assignment or a keyword.
            if(tokens[offset].getType().equals(TokenType.IDENTIFIER))
            {
                int identifierPointer = offset;
                // offset + 2 skips the assignment operator between name and value.
                int identifierValue = offset + 2;

                if(tokens[identifierValue].getType().equals(TokenType.NUMBER))
                    identifierMap.put(tokens[identifierPointer].getValue().toString(), tokens[identifierValue].getValue());
                else if(tokens[identifierValue].getType().equals(TokenType.IDENTIFIER))
                    identifierMap.put(tokens[identifierPointer].getValue().toString(), identifierMap.get(tokens[identifierValue].getValue().toString()));

                // NOTE(review): identifierMap keys are Strings, but a Token is
                // passed here, so containsKey is always false and the first
                // branch is always taken. Likely meant
                // tokens[offset].getValue().toString() — and note the map was
                // just populated two lines above, which would then flip the
                // result. Confirm intended check order before fixing.
                if(!identifierMap.containsKey(tokens[offset]))
                    parsedObject.addIdentifier(new Identifier(tokens[identifierPointer].getValue().toString(), tokens[identifierValue].getValue()));
                else
                    parsedObject.addToken(new Token(identifierMap.get(tokens[identifierValue].getValue().toString()), TokenType.NUMBER));

                // Skip past the operator and the value token just consumed.
                ++offset;
                ++offset;

                if(tokens.length - 1 >= offset + 1)
                    parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                else
                    return parsedObject;
            }
            else if(tokens[offset].getType().equals(TokenType.KEYWORD))
            {
                /*
                 * Check which keyword, then generate a parsedObject for every iteration
                 * if it is a loop.
                 * Else do what the keyword specifies.
                 */
            }
            else
                throw new ParseException("Unable to create a parse object.");
        }
        else if(lastToken != null)
        {
            // Mid-expression: an operand must be followed by an operator, and
            // an operator by an operand.
            if(lastToken.getType().equals(TokenType.IDENTIFIER) || lastToken.getType().equals(TokenType.NUMBER))
            {
                if(tokens[offset].getType().equals(TokenType.OPERATOR))
                {
                    ++offset; //Increment offset

                    if(tokens[offset].getType().equals(TokenType.IDENTIFIER))
                    {
                        parsedObject.addToken(tokens[offset - 1]); // Add operator
                        // parsedObject.addToken(tokens[offset]); // Add identifier
                        parsedObject.addToken(new Token(identifierMap.get(tokens[offset].getValue().toString()), TokenType.NUMBER)); // Substitute identifier for a literal.

                        if(tokens.length - 1 >= offset + 1)
                            parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                        else
                            return parsedObject;
                    }
                    else if(tokens[offset].getType().equals(TokenType.CONSTANT))
                    {
                        parsedObject.addToken(tokens[offset - 1]); // Add operator
                        parsedObject.addToken(tokens[offset]); // Add constant

                        if(tokens.length - 1 >= offset + 1)
                            parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                        else
                            return parsedObject;
                    }
                    else if(tokens[offset].getType().equals(TokenType.NUMBER))
                    {
                        parsedObject.addToken(tokens[offset - 1]); // Add operator
                        parsedObject.addToken(tokens[offset]); // Add number

                        if(tokens.length - 1 >= offset + 1)
                            parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                        else
                            return parsedObject;
                    }
                }
                else //if not followed by an operator
                    throw new ParseException("Unable to create a parse object.");
            }
            else if(lastToken.getType().equals(TokenType.OPERATOR))
            {
                if(tokens[offset].getType().equals(TokenType.IDENTIFIER))
                {
                    // NOTE(review): the comment says "Add identifier" but
                    // lastToken is the operator here, and the substituted
                    // literal is read from offset + 2 rather than offset as in
                    // the branch above — verify this asymmetry is intended.
                    parsedObject.addToken(lastToken); // Add identifier.
                    parsedObject.addToken(new Token(tokens[offset + 2].getValue(), tokens[offset + 2].getType())); // Substitute identifier for a literal.

                    if(tokens.length - 1 >= offset + 1)
                        parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                    else
                        return parsedObject;
                }
                else if(tokens[offset].getType().equals(TokenType.CONSTANT))
                {
                    parsedObject.addToken(lastToken);
                    parsedObject.addToken(tokens[offset]);

                    if(tokens.length - 1 >= offset + 1)
                        parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                    else
                        return parsedObject;
                }
                else if(tokens[offset].getType().equals(TokenType.NUMBER))
                {
                    parsedObject.addToken(lastToken);
                    parsedObject.addToken(tokens[offset]);

                    if(tokens.length - 1 >= offset + 1)
                        parsedObject.combineParsedObjects(parseTokenList(tokens, ++offset, tokens[offset - 1]));
                    else
                        return parsedObject;
                }
                else
                    throw new ParseException("Unable to create a parse object.");
            }
            else
                throw new ParseException("Unable to create a parse object.");
        }

        return parsedObject;
    }

    /**
     * Parses a function call of the shape {@code name ( arg , arg ... )},
     * collecting argument tokens until a {@code ")"} token is seen, then
     * initialises the registered {@link Function} with them.
     *
     * @throws ParseException if an argument is not followed by a delimiter.
     */
    public static ParsedObject parseFunction(Token[] tokens) throws ParseException
    {
        ParsedObject parsedObject = new ParsedObject();
        int offset = 0;

        if(Lexer.isFunction(tokens[offset].getValue().toString()))
        {
            String functionID = tokens[0].getValue().toString();
            List<Token> reqTokens = new ArrayList<Token>();
            // NOTE(review): assumes a default-constructed Token has a non-null
            // value — confirm, otherwise getValue() in the loop guard NPEs on
            // the first iteration.
            Token tmp = new Token();

            if(tokens[++offset].getValue().equals("("))
            {
                while(!tmp.getValue().equals(")"))
                {
                    ++offset;
                    tmp = tokens[offset];

                    // NOTE(review): '||' makes this condition always true (a
                    // type cannot equal both DELIMITER and KEYWORD); '&&' was
                    // probably intended to skip delimiters and keywords.
                    if(!tokens[offset].getType().equals(TokenType.DELIMITER) || !tokens[offset].getType().equals(TokenType.KEYWORD))
                    {
                        reqTokens.add(tokens[offset]);

                        if(tokens.length - 1 >= offset + 1)
                            if(tokens[offset + 1].getType().equals(TokenType.DELIMITER))
                                continue;
                            else
                                throw new ParseException("Could not create a parsed object.");
                        else
                            // Last token reached: hand the collected arguments
                            // to the registered function implementation.
                            parsedObject.addFunctionToMap(functionMap.get(functionID).initFunction(reqTokens.toArray(new Token[reqTokens.size()])));
                        // parsedObject.addFunction(functionMap.get(functionID).initFunction(reqTokens.toArray(new Token[reqTokens.size()])));
                    }
                }
            }
        }
        else
            return parsedObject;

        return parsedObject;
    }

    /**
     * Returns the first operator token in the list, or {@code null} when the
     * list contains no operator.
     */
    @SuppressWarnings("unused")
    private static Token getOperator(List<Token> tokenList)
    {
        Token operator = null;

        for(Token token : tokenList)
        {
            if(token.getType().equals(TokenType.OPERATOR))
                return token;
        }

        return operator;
    }

    // @SuppressWarnings("unused")
    // private static Token getNumber(List<Token> tokenList) throws ParseException
    // {
    //     boolean opFound = false;
    //
    //     for(Token token : tokenList)
    //     {
    //         if(!opFound)
    //         {
    //             if(token.getType().equals(TokenType.OPERATOR))
    //                 continue;
    //         }
    //         else
    //             throw new ParseException("Operator was not found.");
    //
    //         if(token.getType().equals(TokenType.NUMBER))
    //             return token;
    //     }
    //
    //     return null;
    // }
}
/* * Copyright (c) 1996, 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * (C) Copyright Taligent, Inc. 1996, 1997 - All Rights Reserved * (C) Copyright IBM Corp. 1996 - 1998 - All Rights Reserved * * The original version of this source code and documentation is copyrighted * and owned by Taligent, Inc., a wholly-owned subsidiary of IBM. These * materials are provided under terms of a License Agreement between Taligent * and Sun. This technology is protected by multiple US and International * patents. This notice and attribution to Taligent may not be removed. * Taligent is a registered trademark of Taligent, Inc. * */ package java.text; import java.io.InvalidObjectException; import java.io.IOException; import java.io.ObjectInputStream; import java.util.Arrays; /** * A <code>ChoiceFormat</code> allows you to attach a format to a range of numbers. 
* It is generally used in a <code>MessageFormat</code> for handling plurals. * The choice is specified with an ascending list of doubles, where each item * specifies a half-open interval up to the next item: * <blockquote> * <pre> * X matches j if and only if limit[j] &lt;= X &lt; limit[j+1] * </pre> * </blockquote> * If there is no match, then either the first or last index is used, depending * on whether the number (X) is too low or too high. If the limit array is not * in ascending order, the results of formatting will be incorrect. ChoiceFormat * also accepts <code>&#92;u221E</code> as equivalent to infinity(INF). * * <p> * <strong>Note:</strong> * <code>ChoiceFormat</code> differs from the other <code>Format</code> * classes in that you create a <code>ChoiceFormat</code> object with a * constructor (not with a <code>getInstance</code> style factory * method). The factory methods aren't necessary because <code>ChoiceFormat</code> * doesn't require any complex setup for a given locale. In fact, * <code>ChoiceFormat</code> doesn't implement any locale specific behavior. * * <p> * When creating a <code>ChoiceFormat</code>, you must specify an array of formats * and an array of limits. The length of these arrays must be the same. * For example, * <ul> * <li> * <em>limits</em> = {1,2,3,4,5,6,7}<br> * <em>formats</em> = {"Sun","Mon","Tue","Wed","Thur","Fri","Sat"} * <li> * <em>limits</em> = {0, 1, ChoiceFormat.nextDouble(1)}<br> * <em>formats</em> = {"no files", "one file", "many files"}<br> * (<code>nextDouble</code> can be used to get the next higher double, to * make the half-open interval.) 
* </ul> * * <p> * Here is a simple example that shows formatting and parsing: * <blockquote> * <pre> * double[] limits = {1,2,3,4,5,6,7}; * String[] dayOfWeekNames = {"Sun","Mon","Tue","Wed","Thur","Fri","Sat"}; * ChoiceFormat form = new ChoiceFormat(limits, dayOfWeekNames); * ParsePosition status = new ParsePosition(0); * for (double i = 0.0; i &lt;= 8.0; ++i) { * status.setIndex(0); * System.out.println(i + " -&gt; " + form.format(i) + " -&gt; " * + form.parse(form.format(i),status)); * } * </pre> * </blockquote> * Here is a more complex example, with a pattern format: * <blockquote> * <pre> * double[] filelimits = {0,1,2}; * String[] filepart = {"are no files","is one file","are {2} files"}; * ChoiceFormat fileform = new ChoiceFormat(filelimits, filepart); * Format[] testFormats = {fileform, null, NumberFormat.getInstance()}; * MessageFormat pattform = new MessageFormat("There {0} on {1}"); * pattform.setFormats(testFormats); * Object[] testArgs = {null, "ADisk", null}; * for (int i = 0; i &lt; 4; ++i) { * testArgs[0] = new Integer(i); * testArgs[2] = testArgs[0]; * System.out.println(pattform.format(testArgs)); * } * </pre> * </blockquote> * <p> * Specifying a pattern for ChoiceFormat objects is fairly straightforward. 
* For example: * <blockquote> * <pre> * ChoiceFormat fmt = new ChoiceFormat( * "-1#is negative| 0#is zero or fraction | 1#is one |1.0&lt;is 1+ |2#is two |2&lt;is more than 2."); * System.out.println("Formatter Pattern : " + fmt.toPattern()); * * System.out.println("Format with -INF : " + fmt.format(Double.NEGATIVE_INFINITY)); * System.out.println("Format with -1.0 : " + fmt.format(-1.0)); * System.out.println("Format with 0 : " + fmt.format(0)); * System.out.println("Format with 0.9 : " + fmt.format(0.9)); * System.out.println("Format with 1.0 : " + fmt.format(1)); * System.out.println("Format with 1.5 : " + fmt.format(1.5)); * System.out.println("Format with 2 : " + fmt.format(2)); * System.out.println("Format with 2.1 : " + fmt.format(2.1)); * System.out.println("Format with NaN : " + fmt.format(Double.NaN)); * System.out.println("Format with +INF : " + fmt.format(Double.POSITIVE_INFINITY)); * </pre> * </blockquote> * And the output result would be like the following: * <blockquote> * <pre> * Format with -INF : is negative * Format with -1.0 : is negative * Format with 0 : is zero or fraction * Format with 0.9 : is zero or fraction * Format with 1.0 : is one * Format with 1.5 : is 1+ * Format with 2 : is two * Format with 2.1 : is more than 2. * Format with NaN : is negative * Format with +INF : is more than 2. * </pre> * </blockquote> * * <h4><a name="synchronization">Synchronization</a></h4> * * <p> * Choice formats are not synchronized. * It is recommended to create separate format instances for each thread. * If multiple threads access a format concurrently, it must be synchronized * externally. * * * @see DecimalFormat * @see MessageFormat * @author Mark Davis */ public class ChoiceFormat extends NumberFormat { // Proclaim serial compatibility with 1.1 FCS private static final long serialVersionUID = 1795184449645032964L; /** * Sets the pattern. * @param newPattern See the class description. 
*/ public void applyPattern(String newPattern) { StringBuffer[] segments = new StringBuffer[2]; for (int i = 0; i < segments.length; ++i) { segments[i] = new StringBuffer(); } double[] newChoiceLimits = new double[30]; String[] newChoiceFormats = new String[30]; int count = 0; int part = 0; double startValue = 0; double oldStartValue = Double.NaN; boolean inQuote = false; for (int i = 0; i < newPattern.length(); ++i) { char ch = newPattern.charAt(i); if (ch=='\'') { // Check for "''" indicating a literal quote if ((i+1)<newPattern.length() && newPattern.charAt(i+1)==ch) { segments[part].append(ch); ++i; } else { inQuote = !inQuote; } } else if (inQuote) { segments[part].append(ch); } else if (ch == '<' || ch == '#' || ch == '\u2264') { if (segments[0].length() == 0) { throw new IllegalArgumentException(); } try { String tempBuffer = segments[0].toString(); if (tempBuffer.equals("\u221E")) { startValue = Double.POSITIVE_INFINITY; } else if (tempBuffer.equals("-\u221E")) { startValue = Double.NEGATIVE_INFINITY; } else { startValue = Double.valueOf(segments[0].toString()).doubleValue(); } } catch (Exception e) { throw new IllegalArgumentException(); } if (ch == '<' && startValue != Double.POSITIVE_INFINITY && startValue != Double.NEGATIVE_INFINITY) { startValue = nextDouble(startValue); } if (startValue <= oldStartValue) { throw new IllegalArgumentException(); } segments[0].setLength(0); part = 1; } else if (ch == '|') { if (count == newChoiceLimits.length) { newChoiceLimits = doubleArraySize(newChoiceLimits); newChoiceFormats = doubleArraySize(newChoiceFormats); } newChoiceLimits[count] = startValue; newChoiceFormats[count] = segments[1].toString(); ++count; oldStartValue = startValue; segments[1].setLength(0); part = 0; } else { segments[part].append(ch); } } // clean up last one if (part == 1) { if (count == newChoiceLimits.length) { newChoiceLimits = doubleArraySize(newChoiceLimits); newChoiceFormats = doubleArraySize(newChoiceFormats); } newChoiceLimits[count] = 
startValue; newChoiceFormats[count] = segments[1].toString(); ++count; } choiceLimits = new double[count]; System.arraycopy(newChoiceLimits, 0, choiceLimits, 0, count); choiceFormats = new String[count]; System.arraycopy(newChoiceFormats, 0, choiceFormats, 0, count); } /** * Gets the pattern. */ public String toPattern() { StringBuffer result = new StringBuffer(); for (int i = 0; i < choiceLimits.length; ++i) { if (i != 0) { result.append('|'); } // choose based upon which has less precision // approximate that by choosing the closest one to an integer. // could do better, but it's not worth it. double less = previousDouble(choiceLimits[i]); double tryLessOrEqual = Math.abs(Math.IEEEremainder(choiceLimits[i], 1.0d)); double tryLess = Math.abs(Math.IEEEremainder(less, 1.0d)); if (tryLessOrEqual < tryLess) { result.append(""+choiceLimits[i]); result.append('#'); } else { if (choiceLimits[i] == Double.POSITIVE_INFINITY) { result.append("\u221E"); } else if (choiceLimits[i] == Double.NEGATIVE_INFINITY) { result.append("-\u221E"); } else { result.append(""+less); } result.append('<'); } // Append choiceFormats[i], using quotes if there are special characters. // Single quotes themselves must be escaped in either case. String text = choiceFormats[i]; boolean needQuote = text.indexOf('<') >= 0 || text.indexOf('#') >= 0 || text.indexOf('\u2264') >= 0 || text.indexOf('|') >= 0; if (needQuote) result.append('\''); if (text.indexOf('\'') < 0) result.append(text); else { for (int j=0; j<text.length(); ++j) { char c = text.charAt(j); result.append(c); if (c == '\'') result.append(c); } } if (needQuote) result.append('\''); } return result.toString(); } /** * Constructs with limits and corresponding formats based on the pattern. * @see #applyPattern */ public ChoiceFormat(String newPattern) { applyPattern(newPattern); } /** * Constructs with the limits and the corresponding formats. 
* @see #setChoices */ public ChoiceFormat(double[] limits, String[] formats) { setChoices(limits, formats); } /** * Set the choices to be used in formatting. * @param limits contains the top value that you want * parsed with that format,and should be in ascending sorted order. When * formatting X, the choice will be the i, where * limit[i] &lt;= X &lt; limit[i+1]. * If the limit array is not in ascending order, the results of formatting * will be incorrect. * @param formats are the formats you want to use for each limit. * They can be either Format objects or Strings. * When formatting with object Y, * if the object is a NumberFormat, then ((NumberFormat) Y).format(X) * is called. Otherwise Y.toString() is called. */ public void setChoices(double[] limits, String formats[]) { if (limits.length != formats.length) { throw new IllegalArgumentException( "Array and limit arrays must be of the same length."); } choiceLimits = limits; choiceFormats = formats; } /** * Get the limits passed in the constructor. * @return the limits. */ public double[] getLimits() { return choiceLimits; } /** * Get the formats passed in the constructor. * @return the formats. */ public Object[] getFormats() { return choiceFormats; } // Overrides /** * Specialization of format. This method really calls * <code>format(double, StringBuffer, FieldPosition)</code> * thus the range of longs that are supported is only equal to * the range that can be stored by double. This will never be * a practical limitation. */ public StringBuffer format(long number, StringBuffer toAppendTo, FieldPosition status) { return format((double)number, toAppendTo, status); } /** * Returns pattern with formatted double. * @param number number to be formatted & substituted. * @param toAppendTo where text is appended. * @param status ignore no useful status is returned. 
*/ public StringBuffer format(double number, StringBuffer toAppendTo, FieldPosition status) { // find the number int i; for (i = 0; i < choiceLimits.length; ++i) { if (!(number >= choiceLimits[i])) { // same as number < choiceLimits, except catchs NaN break; } } --i; if (i < 0) i = 0; // return either a formatted number, or a string return toAppendTo.append(choiceFormats[i]); } /** * Parses a Number from the input text. * @param text the source text. * @param status an input-output parameter. On input, the * status.index field indicates the first character of the * source text that should be parsed. On exit, if no error * occured, status.index is set to the first unparsed character * in the source text. On exit, if an error did occur, * status.index is unchanged and status.errorIndex is set to the * first index of the character that caused the parse to fail. * @return A Number representing the value of the number parsed. */ public Number parse(String text, ParsePosition status) { // find the best number (defined as the one with the longest parse) int start = status.index; int furthest = start; double bestNumber = Double.NaN; double tempNumber = 0.0; for (int i = 0; i < choiceFormats.length; ++i) { String tempString = choiceFormats[i]; if (text.regionMatches(start, tempString, 0, tempString.length())) { status.index = start + tempString.length(); tempNumber = choiceLimits[i]; if (status.index > furthest) { furthest = status.index; bestNumber = tempNumber; if (furthest == text.length()) break; } } } status.index = furthest; if (status.index == start) { status.errorIndex = furthest; } return new Double(bestNumber); } /** * Finds the least double greater than d. * If NaN, returns same value. * <p>Used to make half-open intervals. * @see #previousDouble */ public static final double nextDouble (double d) { return nextDouble(d,true); } /** * Finds the greatest double less than d. * If NaN, returns same value. 
* @see #nextDouble */ public static final double previousDouble (double d) { return nextDouble(d,false); } /** * Overrides Cloneable */ public Object clone() { ChoiceFormat other = (ChoiceFormat) super.clone(); // for primitives or immutables, shallow clone is enough other.choiceLimits = (double[]) choiceLimits.clone(); other.choiceFormats = (String[]) choiceFormats.clone(); return other; } /** * Generates a hash code for the message format object. */ public int hashCode() { int result = choiceLimits.length; if (choiceFormats.length > 0) { // enough for reasonable distribution result ^= choiceFormats[choiceFormats.length-1].hashCode(); } return result; } /** * Equality comparision between two */ public boolean equals(Object obj) { if (obj == null) return false; if (this == obj) // quick check return true; if (getClass() != obj.getClass()) return false; ChoiceFormat other = (ChoiceFormat) obj; return (Arrays.equals(choiceLimits, other.choiceLimits) && Arrays.equals(choiceFormats, other.choiceFormats)); } /** * After reading an object from the input stream, do a simple verification * to maintain class invariants. * @throws InvalidObjectException if the objects read from the stream is invalid. */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); if (choiceLimits.length != choiceFormats.length) { throw new InvalidObjectException( "limits and format arrays of different length."); } } // ===============privates=========================== /** * A list of lower bounds for the choices. The formatter will return * <code>choiceFormats[i]</code> if the number being formatted is greater than or equal to * <code>choiceLimits[i]</code> and less than <code>choiceLimits[i+1]</code>. * @serial */ private double[] choiceLimits; /** * A list of choice strings. 
The formatter will return * <code>choiceFormats[i]</code> if the number being formatted is greater than or equal to * <code>choiceLimits[i]</code> and less than <code>choiceLimits[i+1]</code>. * @serial */ private String[] choiceFormats; /* static final long SIGN = 0x8000000000000000L; static final long EXPONENT = 0x7FF0000000000000L; static final long SIGNIFICAND = 0x000FFFFFFFFFFFFFL; private static double nextDouble (double d, boolean positive) { if (Double.isNaN(d) || Double.isInfinite(d)) { return d; } long bits = Double.doubleToLongBits(d); long significand = bits & SIGNIFICAND; if (bits < 0) { significand |= (SIGN | EXPONENT); } long exponent = bits & EXPONENT; if (positive) { significand += 1; // FIXME fix overflow & underflow } else { significand -= 1; // FIXME fix overflow & underflow } bits = exponent | (significand & ~EXPONENT); return Double.longBitsToDouble(bits); } */ static final long SIGN = 0x8000000000000000L; static final long EXPONENT = 0x7FF0000000000000L; static final long POSITIVEINFINITY = 0x7FF0000000000000L; /** * Finds the least double greater than d (if positive == true), * or the greatest double less than d (if positive == false). * If NaN, returns same value. 
* * Does not affect floating-point flags, * provided these member functions do not: * Double.longBitsToDouble(long) * Double.doubleToLongBits(double) * Double.isNaN(double) */ public static double nextDouble (double d, boolean positive) { /* filter out NaN's */ if (Double.isNaN(d)) { return d; } /* zero's are also a special case */ if (d == 0.0) { double smallestPositiveDouble = Double.longBitsToDouble(1L); if (positive) { return smallestPositiveDouble; } else { return -smallestPositiveDouble; } } /* if entering here, d is a nonzero value */ /* hold all bits in a long for later use */ long bits = Double.doubleToLongBits(d); /* strip off the sign bit */ long magnitude = bits & ~SIGN; /* if next double away from zero, increase magnitude */ if ((bits > 0) == positive) { if (magnitude != POSITIVEINFINITY) { magnitude += 1; } } /* else decrease magnitude */ else { magnitude -= 1; } /* restore sign bit and return */ long signbit = bits & SIGN; return Double.longBitsToDouble (magnitude | signbit); } private static double[] doubleArraySize(double[] array) { int oldSize = array.length; double[] newArray = new double[oldSize * 2]; System.arraycopy(array, 0, newArray, 0, oldSize); return newArray; } private String[] doubleArraySize(String[] array) { int oldSize = array.length; String[] newArray = new String[oldSize * 2]; System.arraycopy(array, 0, newArray, 0, oldSize); return newArray; } }
/* * Copyright 2012-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.mongo; import java.net.UnknownHostException; import java.util.List; import com.mongodb.MongoClient; import com.mongodb.MongoClientOptions; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; import com.mongodb.connection.Cluster; import com.mongodb.connection.ClusterSettings; import org.junit.Test; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.util.EnvironmentTestUtils; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Configuration; import org.springframework.test.util.ReflectionTestUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link MongoProperties}. 
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 */
public class MongoPropertiesTests {

	// Binding a String property into the char[] password field must work.
	@Test
	public void canBindCharArrayPassword() {
		// gh-1572
		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
		EnvironmentTestUtils.addEnvironment(context,
				"spring.data.mongodb.password:word");
		context.register(Config.class);
		context.refresh();
		MongoProperties properties = context.getBean(MongoProperties.class);
		assertThat(properties.getPassword()).isEqualTo("word".toCharArray());
	}

	// A custom port is propagated to the single server address.
	@Test
	public void portCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setPort(12345);
		MongoClient client = properties.createMongoClient(null, null);
		List<ServerAddress> allAddresses = extractServerAddresses(client);
		assertThat(allAddresses).hasSize(1);
		assertServerAddress(allAddresses.get(0), "localhost", 12345);
	}

	// A custom host is propagated; the port stays at the Mongo default 27017.
	@Test
	public void hostCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setHost("mongo.example.com");
		MongoClient client = properties.createMongoClient(null, null);
		List<ServerAddress> allAddresses = extractServerAddresses(client);
		assertThat(allAddresses).hasSize(1);
		assertServerAddress(allAddresses.get(0), "mongo.example.com", 27017);
	}

	// Username/password end up in the client's credential list ("test" is the
	// default database when none is configured).
	@Test
	public void credentialsCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setUsername("user");
		properties.setPassword("secret".toCharArray());
		MongoClient client = properties.createMongoClient(null, null);
		assertMongoCredential(client.getCredentialsList().get(0), "user", "secret",
				"test");
	}

	// The configured database is used as the credential source.
	@Test
	public void databaseCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setDatabase("foo");
		properties.setUsername("user");
		properties.setPassword("secret".toCharArray());
		MongoClient client = properties.createMongoClient(null, null);
		assertMongoCredential(client.getCredentialsList().get(0), "user", "secret",
				"foo");
	}

	// An explicit authentication database takes over as credential source.
	@Test
	public void authenticationDatabaseCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setAuthenticationDatabase("foo");
		properties.setUsername("user");
		properties.setPassword("secret".toCharArray());
		MongoClient client = properties.createMongoClient(null, null);
		assertMongoCredential(client.getCredentialsList().get(0), "user", "secret",
				"foo");
	}

	// A full connection URI supplies hosts, ports, credentials, and database.
	@Test
	public void uriCanBeCustomized() throws UnknownHostException {
		MongoProperties properties = new MongoProperties();
		properties.setUri("mongodb://user:secret@mongo1.example.com:12345,"
				+ "mongo2.example.com:23456/test");
		MongoClient client = properties.createMongoClient(null, null);
		List<ServerAddress> allAddresses = extractServerAddresses(client);
		assertThat(allAddresses).hasSize(2);
		assertServerAddress(allAddresses.get(0), "mongo1.example.com", 12345);
		assertServerAddress(allAddresses.get(1), "mongo2.example.com", 23456);
		List<MongoCredential> credentialsList = client.getCredentialsList();
		assertThat(credentialsList).hasSize(1);
		assertMongoCredential(credentialsList.get(0), "user", "secret", "test");
	}

	// Every MongoClientOptions setting must survive the round trip through
	// createMongoClient unchanged.
	@Test
	public void allMongoClientOptionsCanBeSet() throws UnknownHostException {
		MongoClientOptions.Builder builder = MongoClientOptions.builder();
		builder.alwaysUseMBeans(true);
		builder.connectionsPerHost(101);
		builder.connectTimeout(10001);
		builder.cursorFinalizerEnabled(false);
		builder.description("test");
		builder.maxWaitTime(120001);
		builder.socketKeepAlive(true);
		builder.socketTimeout(1000);
		builder.threadsAllowedToBlockForConnectionMultiplier(6);
		builder.minConnectionsPerHost(0);
		builder.maxConnectionIdleTime(60000);
		builder.maxConnectionLifeTime(60000);
		builder.heartbeatFrequency(10001);
		builder.minHeartbeatFrequency(501);
		builder.heartbeatConnectTimeout(20001);
		builder.heartbeatSocketTimeout(20001);
		builder.localThreshold(20);
		builder.requiredReplicaSetName("testReplicaSetName");
		MongoClientOptions options = builder.build();
		MongoProperties properties = new MongoProperties();
		MongoClient client = properties.createMongoClient(options, null);
		MongoClientOptions wrapped = client.getMongoClientOptions();
		assertThat(wrapped.isAlwaysUseMBeans()).isEqualTo(options.isAlwaysUseMBeans());
		assertThat(wrapped.getConnectionsPerHost())
				.isEqualTo(options.getConnectionsPerHost());
		assertThat(wrapped.getConnectTimeout()).isEqualTo(options.getConnectTimeout());
		assertThat(wrapped.isCursorFinalizerEnabled())
				.isEqualTo(options.isCursorFinalizerEnabled());
		assertThat(wrapped.getDescription()).isEqualTo(options.getDescription());
		assertThat(wrapped.getMaxWaitTime()).isEqualTo(options.getMaxWaitTime());
		assertThat(wrapped.getSocketTimeout()).isEqualTo(options.getSocketTimeout());
		assertThat(wrapped.isSocketKeepAlive()).isEqualTo(options.isSocketKeepAlive());
		assertThat(wrapped.getThreadsAllowedToBlockForConnectionMultiplier())
				.isEqualTo(options.getThreadsAllowedToBlockForConnectionMultiplier());
		assertThat(wrapped.getMinConnectionsPerHost())
				.isEqualTo(options.getMinConnectionsPerHost());
		assertThat(wrapped.getMaxConnectionIdleTime())
				.isEqualTo(options.getMaxConnectionIdleTime());
		assertThat(wrapped.getMaxConnectionLifeTime())
				.isEqualTo(options.getMaxConnectionLifeTime());
		assertThat(wrapped.getHeartbeatFrequency())
				.isEqualTo(options.getHeartbeatFrequency());
		assertThat(wrapped.getMinHeartbeatFrequency())
				.isEqualTo(options.getMinHeartbeatFrequency());
		assertThat(wrapped.getHeartbeatConnectTimeout())
				.isEqualTo(options.getHeartbeatConnectTimeout());
		assertThat(wrapped.getHeartbeatSocketTimeout())
				.isEqualTo(options.getHeartbeatSocketTimeout());
		assertThat(wrapped.getLocalThreshold()).isEqualTo(options.getLocalThreshold());
		assertThat(wrapped.getRequiredReplicaSetName())
				.isEqualTo(options.getRequiredReplicaSetName());
	}

	// Digs the configured hosts out of the driver's private cluster settings
	// via reflection, since MongoClient does not expose them directly.
	private List<ServerAddress> extractServerAddresses(MongoClient client) {
		Cluster cluster = (Cluster) ReflectionTestUtils.getField(client, "cluster");
		ClusterSettings clusterSettings = (ClusterSettings) ReflectionTestUtils
				.getField(cluster, "settings");
		List<ServerAddress> allAddresses = clusterSettings.getHosts();
		return allAddresses;
	}

	// Asserts one server address matches the expected host and port.
	private void assertServerAddress(ServerAddress serverAddress, String expectedHost,
			int expectedPort) {
		assertThat(serverAddress.getHost()).isEqualTo(expectedHost);
		assertThat(serverAddress.getPort()).isEqualTo(expectedPort);
	}

	// Asserts one credential matches the expected user, password, and source.
	private void assertMongoCredential(MongoCredential credentials,
			String expectedUsername, String expectedPassword, String expectedSource) {
		assertThat(credentials.getUserName()).isEqualTo(expectedUsername);
		assertThat(credentials.getPassword()).isEqualTo(expectedPassword.toCharArray());
		assertThat(credentials.getSource()).isEqualTo(expectedSource);
	}

	// Minimal configuration enabling MongoProperties binding for the
	// canBindCharArrayPassword test.
	@Configuration
	@EnableConfigurationProperties(MongoProperties.class)
	static class Config {

	}

}
package com.gome.haoyuangong.net.result.ask;

import java.util.ArrayList;
import java.util.List;

import android.text.SpannableString;

import com.gome.haoyuangong.net.result.BaseResultWeb;
import com.gome.haoyuangong.net.result.ask.AskItemAnswerResult.AskItemAnswer;
import com.gome.haoyuangong.net.result.tougu.TouguUserBean;

/**
 * Web-service result for the question ("ask") detail screen: a single
 * question ({@link AskDetail}) together with its answers
 * ({@link AskDetailAnswer}) and, per answer, an optional follow-up
 * question/answer pair ({@link AgainAskVo}).
 *
 * NOTE(review): the nested beans were previously non-static inner classes,
 * so every instance carried a hidden reference to the enclosing
 * AskDetailResult (a leak risk, and a known pitfall for reflection-based
 * deserializers such as Gson). They are now {@code static} nested classes;
 * external references of the form {@code AskDetailResult.AskDetail} are
 * unaffected.
 */
public class AskDetailResult extends BaseResultWeb {

    // Eagerly initialized so callers never observe a null payload.
    private AskDetail data = new AskDetail();

    public AskDetail getData() {
        return data;
    }

    public void setData(AskDetail data) {
        this.data = data;
    }

    /** The question itself plus the list of answers given to it. */
    public static class AskDetail {
        private int askId;
        private String ausername;                 // asking user's display name
        private String auserId;                   // asking user's id
        private String content;                   // question text
        private long ctime;                       // creation time (epoch value; unit per server contract — TODO confirm)
        private List<AskDetailAnswer> answers = new ArrayList<AskDetailResult.AskDetailAnswer>();
        // UI-side cache of the rendered question text; transient so it is
        // excluded from serialization.
        transient private SpannableString contentSpanStr;

        public SpannableString getContentSpanStr() {
            return contentSpanStr;
        }

        public void setContentSpanStr(SpannableString contentSpanStr) {
            this.contentSpanStr = contentSpanStr;
        }

        public String getAusername() {
            return ausername;
        }

        public void setAusername(String ausername) {
            this.ausername = ausername;
        }

        public List<AskDetailAnswer> getAnswers() {
            return answers;
        }

        public void setAnswers(List<AskDetailAnswer> answers) {
            this.answers = answers;
        }

        public int getAskId() {
            return askId;
        }

        public void setAskId(int askId) {
            this.askId = askId;
        }

        public String getAuserId() {
            return auserId;
        }

        public void setAuserId(String auserId) {
            this.auserId = auserId;
        }

        public String getContent() {
            return content;
        }

        public void setContent(String content) {
            this.content = content;
        }

        public long getCtime() {
            return ctime;
        }

        public void setCtime(long ctime) {
            this.ctime = ctime;
        }
    }

    /** One answer to the question, optionally with an evaluation and a follow-up. */
    public static class AskDetailAnswer {
        private int answerId;
        private String content;                   // answer text
        private long ctime;                       // creation time
        private int evaluate = -1;                // evaluation score; -1 means "not evaluated" (isHaveEvaluate() treats >0 as evaluated)
        private String evaContent;                // evaluation comment text
        private String voiceAmr;                  // URL/path of the voice (AMR) attachment, if any
        private int voicelength;                  // voice clip length
        private boolean includeImage;             // whether the answer contains an image
        // Eagerly initialized so callers never observe a null adviser bean.
        private TouguUserBean adviserUser = new TouguUserBean();
        private AgainAskVo againAskVo;            // follow-up question/answer, may be null
        // UI-side caches of rendered text; transient so they are excluded
        // from serialization.
        transient private SpannableString contentSpanStr;
        transient private SpannableString evaContentSpanStr;

        public SpannableString getEvaContentSpanStr() {
            return evaContentSpanStr;
        }

        public void setEvaContentSpanStr(SpannableString evaContentSpanStr) {
            this.evaContentSpanStr = evaContentSpanStr;
        }

        public SpannableString getContentSpanStr() {
            return contentSpanStr;
        }

        public void setContentSpanStr(SpannableString contentSpanStr) {
            this.contentSpanStr = contentSpanStr;
        }

        public boolean isIncludeImage() {
            return includeImage;
        }

        public void setIncludeImage(boolean includeImage) {
            this.includeImage = includeImage;
        }

        public int getVoicelength() {
            return voicelength;
        }

        public void setVoicelength(int voicelength) {
            this.voicelength = voicelength;
        }

        public String getEvaContent() {
            return evaContent;
        }

        public void setEvaContent(String evaContent) {
            this.evaContent = evaContent;
        }

        public String getVoiceAmr() {
            return voiceAmr;
        }

        public void setVoiceAmr(String voiceAmr) {
            this.voiceAmr = voiceAmr;
        }

        public int getAnswerId() {
            return answerId;
        }

        public void setAnswerId(int answerId) {
            this.answerId = answerId;
        }

        public String getContent() {
            return content;
        }

        public void setContent(String content) {
            this.content = content;
        }

        public long getCtime() {
            return ctime;
        }

        public void setCtime(long ctime) {
            this.ctime = ctime;
        }

        public int getEvaluate() {
            return evaluate;
        }

        public void setEvaluate(int evaluate) {
            this.evaluate = evaluate;
        }

        public TouguUserBean getAdviserUser() {
            return adviserUser;
        }

        public void setAdviserUser(TouguUserBean adviserUser) {
            this.adviserUser = adviserUser;
        }

        public AgainAskVo getAgainAskVo() {
            return againAskVo;
        }

        public void setAgainAskVo(AgainAskVo againAskVo) {
            this.againAskVo = againAskVo;
        }

        /** True when this answer has a positive evaluation score. */
        public boolean isHaveEvaluate() {
            return evaluate > 0;
        }
    }

    /** Follow-up ("again ask") question and, when present, its answer. */
    public static class AgainAskVo {
        // Defaults to "" so UI code can render without a null check.
        private String againanswerContent = "";
        private String againanswerVoiceAmr;       // URL/path of the follow-up answer's voice attachment
        private int againanswerVoicelength;
        private long againanswerCtime;            // follow-up answer creation time
        private String content;                   // follow-up question text
        private long ctime;                       // follow-up question creation time
        private int hasAgainanswer;               // server flag: whether the follow-up has been answered (int-encoded boolean — TODO confirm values)
        private int id;
        // UI-side caches of rendered text; transient so they are excluded
        // from serialization.
        transient private SpannableString contentSpanStr;
        transient private SpannableString againanswerContentSpanStr;

        public SpannableString getAgainanswerContentSpanStr() {
            return againanswerContentSpanStr;
        }

        public void setAgainanswerContentSpanStr(SpannableString againanswerContentSpanStr) {
            this.againanswerContentSpanStr = againanswerContentSpanStr;
        }

        public SpannableString getContentSpanStr() {
            return contentSpanStr;
        }

        public void setContentSpanStr(SpannableString contentSpanStr) {
            this.contentSpanStr = contentSpanStr;
        }

        public String getAgainanswerVoiceAmr() {
            return againanswerVoiceAmr;
        }

        public void setAgainanswerVoiceAmr(String againanswerVoiceAmr) {
            this.againanswerVoiceAmr = againanswerVoiceAmr;
        }

        public int getAgainanswerVoicelength() {
            return againanswerVoicelength;
        }

        public void setAgainanswerVoicelength(int againanswerVoicelength) {
            this.againanswerVoicelength = againanswerVoicelength;
        }

        public String getAgainanswerContent() {
            return againanswerContent;
        }

        public void setAgainanswerContent(String againanswerContent) {
            this.againanswerContent = againanswerContent;
        }

        public long getAgainanswerCtime() {
            return againanswerCtime;
        }

        public void setAgainanswerCtime(long againanswerCtime) {
            this.againanswerCtime = againanswerCtime;
        }

        public String getContent() {
            return content;
        }

        public void setContent(String content) {
            this.content = content;
        }

        public long getCtime() {
            return ctime;
        }

        public void setCtime(long ctime) {
            this.ctime = ctime;
        }

        public int getHasAgainanswer() {
            return hasAgainanswer;
        }

        public void setHasAgainanswer(int hasAgainanswer) {
            this.hasAgainanswer = hasAgainanswer;
        }

        public int getId() {
            return id;
        }

        public void setId(int id) {
            this.id = id;
        }
    }
}
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.web3j;

import java.util.Map;

import org.apache.camel.CamelContext;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;

/**
 * Generated by camel build tools - do NOT edit this file!
 *
 * Reflection-free property configurer for the web3j component: maps String
 * option names (case-insensitively) to typed setters/getters on
 * {@link Web3jComponent} and its lazily created Web3jConfiguration.
 * NOTE(review): generated code — any change must be made in the generator
 * input (the component/configuration classes), not here.
 */
@SuppressWarnings("unchecked")
public class Web3jComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    // Option name -> option type, used by tooling for discovery/validation.
    private static final Map<String, Object> ALL_OPTIONS;
    static {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("addresses", java.util.List.class);
        map.put("configuration", org.apache.camel.component.web3j.Web3jConfiguration.class);
        map.put("fromAddress", java.lang.String.class);
        map.put("fromBlock", java.lang.String.class);
        map.put("fullTransactionObjects", boolean.class);
        map.put("gasLimit", java.math.BigInteger.class);
        map.put("privateFor", java.util.List.class);
        map.put("quorumAPI", boolean.class);
        map.put("toAddress", java.lang.String.class);
        map.put("toBlock", java.lang.String.class);
        map.put("topics", java.lang.String.class);
        map.put("web3j", org.web3j.protocol.Web3j.class);
        map.put("bridgeErrorHandler", boolean.class);
        map.put("address", java.lang.String.class);
        map.put("atBlock", java.lang.String.class);
        map.put("blockHash", java.lang.String.class);
        map.put("clientId", java.lang.String.class);
        map.put("data", java.lang.String.class);
        map.put("databaseName", java.lang.String.class);
        map.put("filterId", java.math.BigInteger.class);
        map.put("gasPrice", java.math.BigInteger.class);
        map.put("hashrate", java.lang.String.class);
        map.put("headerPowHash", java.lang.String.class);
        map.put("index", java.math.BigInteger.class);
        map.put("keyName", java.lang.String.class);
        map.put("lazyStartProducer", boolean.class);
        map.put("mixDigest", java.lang.String.class);
        map.put("nonce", java.lang.String.class);
        map.put("operation", java.lang.String.class);
        map.put("position", java.math.BigInteger.class);
        map.put("priority", java.math.BigInteger.class);
        map.put("sha3HashOfDataToSign", java.lang.String.class);
        map.put("signedTransactionData", java.lang.String.class);
        map.put("sourceCode", java.lang.String.class);
        map.put("transactionHash", java.lang.String.class);
        map.put("ttl", java.math.BigInteger.class);
        map.put("value", java.math.BigInteger.class);
        map.put("basicPropertyBinding", boolean.class);
        ALL_OPTIONS = map;
        // Register so Camel can clear this static state on shutdown/reload.
        ConfigurerStrategy.addConfigurerClearer(Web3jComponentConfigurer::clearConfigurers);
    }

    // Lazily creates the component's configuration so options can be set
    // before the component is started.
    private org.apache.camel.component.web3j.Web3jConfiguration getOrCreateConfiguration(Web3jComponent target) {
        if (target.getConfiguration() == null) {
            target.setConfiguration(new org.apache.camel.component.web3j.Web3jConfiguration());
        }
        return target.getConfiguration();
    }

    /**
     * Applies one named option to the component (or its configuration).
     * Each option has two case labels: all-lowercase (for ignoreCase=true,
     * where the name is lowercased first) and the canonical camelCase.
     * Returns false when the option name is unknown.
     */
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        Web3jComponent target = (Web3jComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "address": getOrCreateConfiguration(target).setAddress(property(camelContext, java.lang.String.class, value)); return true;
        case "addresses": getOrCreateConfiguration(target).setAddresses(property(camelContext, java.util.List.class, value)); return true;
        case "atblock":
        case "atBlock": getOrCreateConfiguration(target).setAtBlock(property(camelContext, java.lang.String.class, value)); return true;
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "blockhash":
        case "blockHash": getOrCreateConfiguration(target).setBlockHash(property(camelContext, java.lang.String.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "clientid":
        case "clientId": getOrCreateConfiguration(target).setClientId(property(camelContext, java.lang.String.class, value)); return true;
        case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.web3j.Web3jConfiguration.class, value)); return true;
        case "data": getOrCreateConfiguration(target).setData(property(camelContext, java.lang.String.class, value)); return true;
        case "databasename":
        case "databaseName": getOrCreateConfiguration(target).setDatabaseName(property(camelContext, java.lang.String.class, value)); return true;
        case "filterid":
        case "filterId": getOrCreateConfiguration(target).setFilterId(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "fromaddress":
        case "fromAddress": getOrCreateConfiguration(target).setFromAddress(property(camelContext, java.lang.String.class, value)); return true;
        case "fromblock":
        case "fromBlock": getOrCreateConfiguration(target).setFromBlock(property(camelContext, java.lang.String.class, value)); return true;
        case "fulltransactionobjects":
        case "fullTransactionObjects": getOrCreateConfiguration(target).setFullTransactionObjects(property(camelContext, boolean.class, value)); return true;
        case "gaslimit":
        case "gasLimit": getOrCreateConfiguration(target).setGasLimit(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "gasprice":
        case "gasPrice": getOrCreateConfiguration(target).setGasPrice(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "hashrate": getOrCreateConfiguration(target).setHashrate(property(camelContext, java.lang.String.class, value)); return true;
        case "headerpowhash":
        case "headerPowHash": getOrCreateConfiguration(target).setHeaderPowHash(property(camelContext, java.lang.String.class, value)); return true;
        case "index": getOrCreateConfiguration(target).setIndex(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "keyname":
        case "keyName": getOrCreateConfiguration(target).setKeyName(property(camelContext, java.lang.String.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "mixdigest":
        case "mixDigest": getOrCreateConfiguration(target).setMixDigest(property(camelContext, java.lang.String.class, value)); return true;
        case "nonce": getOrCreateConfiguration(target).setNonce(property(camelContext, java.lang.String.class, value)); return true;
        case "operation": getOrCreateConfiguration(target).setOperation(property(camelContext, java.lang.String.class, value)); return true;
        case "position": getOrCreateConfiguration(target).setPosition(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "priority": getOrCreateConfiguration(target).setPriority(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "privatefor":
        case "privateFor": getOrCreateConfiguration(target).setPrivateFor(property(camelContext, java.util.List.class, value)); return true;
        case "quorumapi":
        case "quorumAPI": getOrCreateConfiguration(target).setQuorumAPI(property(camelContext, boolean.class, value)); return true;
        case "sha3hashofdatatosign":
        case "sha3HashOfDataToSign": getOrCreateConfiguration(target).setSha3HashOfDataToSign(property(camelContext, java.lang.String.class, value)); return true;
        case "signedtransactiondata":
        case "signedTransactionData": getOrCreateConfiguration(target).setSignedTransactionData(property(camelContext, java.lang.String.class, value)); return true;
        case "sourcecode":
        case "sourceCode": getOrCreateConfiguration(target).setSourceCode(property(camelContext, java.lang.String.class, value)); return true;
        case "toaddress":
        case "toAddress": getOrCreateConfiguration(target).setToAddress(property(camelContext, java.lang.String.class, value)); return true;
        case "toblock":
        case "toBlock": getOrCreateConfiguration(target).setToBlock(property(camelContext, java.lang.String.class, value)); return true;
        case "topics": getOrCreateConfiguration(target).setTopics(property(camelContext, java.lang.String.class, value)); return true;
        case "transactionhash":
        case "transactionHash": getOrCreateConfiguration(target).setTransactionHash(property(camelContext, java.lang.String.class, value)); return true;
        case "ttl": getOrCreateConfiguration(target).setTtl(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "value": getOrCreateConfiguration(target).setValue(property(camelContext, java.math.BigInteger.class, value)); return true;
        case "web3j": getOrCreateConfiguration(target).setWeb3j(property(camelContext, org.web3j.protocol.Web3j.class, value)); return true;
        default: return false;
        }
    }

    // Shared static map is returned directly (not copied) by design of the
    // generated configurers.
    @Override
    public Map<String, Object> getAllOptions(Object target) {
        return ALL_OPTIONS;
    }

    public static void clearBootstrapConfigurers() {
    }

    // Invoked via ConfigurerStrategy on context shutdown to release memory.
    public static void clearConfigurers() {
        ALL_OPTIONS.clear();
    }

    /**
     * Reads one named option from the component (or its configuration);
     * mirrors {@link #configure}'s case-label scheme. Returns null when the
     * option name is unknown.
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        Web3jComponent target = (Web3jComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "address": return getOrCreateConfiguration(target).getAddress();
        case "addresses": return getOrCreateConfiguration(target).getAddresses();
        case "atblock":
        case "atBlock": return getOrCreateConfiguration(target).getAtBlock();
        case "basicpropertybinding":
        case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "blockhash":
        case "blockHash": return getOrCreateConfiguration(target).getBlockHash();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "clientid":
        case "clientId": return getOrCreateConfiguration(target).getClientId();
        case "configuration": return target.getConfiguration();
        case "data": return getOrCreateConfiguration(target).getData();
        case "databasename":
        case "databaseName": return getOrCreateConfiguration(target).getDatabaseName();
        case "filterid":
        case "filterId": return getOrCreateConfiguration(target).getFilterId();
        case "fromaddress":
        case "fromAddress": return getOrCreateConfiguration(target).getFromAddress();
        case "fromblock":
        case "fromBlock": return getOrCreateConfiguration(target).getFromBlock();
        case "fulltransactionobjects":
        case "fullTransactionObjects": return getOrCreateConfiguration(target).isFullTransactionObjects();
        case "gaslimit":
        case "gasLimit": return getOrCreateConfiguration(target).getGasLimit();
        case "gasprice":
        case "gasPrice": return getOrCreateConfiguration(target).getGasPrice();
        case "hashrate": return getOrCreateConfiguration(target).getHashrate();
        case "headerpowhash":
        case "headerPowHash": return getOrCreateConfiguration(target).getHeaderPowHash();
        case "index": return getOrCreateConfiguration(target).getIndex();
        case "keyname":
        case "keyName": return getOrCreateConfiguration(target).getKeyName();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "mixdigest":
        case "mixDigest": return getOrCreateConfiguration(target).getMixDigest();
        case "nonce": return getOrCreateConfiguration(target).getNonce();
        case "operation": return getOrCreateConfiguration(target).getOperation();
        case "position": return getOrCreateConfiguration(target).getPosition();
        case "priority": return getOrCreateConfiguration(target).getPriority();
        case "privatefor":
        case "privateFor": return getOrCreateConfiguration(target).getPrivateFor();
        case "quorumapi":
        case "quorumAPI": return getOrCreateConfiguration(target).isQuorumAPI();
        case "sha3hashofdatatosign":
        case "sha3HashOfDataToSign": return getOrCreateConfiguration(target).getSha3HashOfDataToSign();
        case "signedtransactiondata":
        case "signedTransactionData": return getOrCreateConfiguration(target).getSignedTransactionData();
        case "sourcecode":
        case "sourceCode": return getOrCreateConfiguration(target).getSourceCode();
        case "toaddress":
        case "toAddress": return getOrCreateConfiguration(target).getToAddress();
        case "toblock":
        case "toBlock": return getOrCreateConfiguration(target).getToBlock();
        case "topics": return getOrCreateConfiguration(target).getTopics();
        case "transactionhash":
        case "transactionHash": return getOrCreateConfiguration(target).getTransactionHash();
        case "ttl": return getOrCreateConfiguration(target).getTtl();
        case "value": return getOrCreateConfiguration(target).getValue();
        case "web3j": return getOrCreateConfiguration(target).getWeb3j();
        default: return null;
        }
    }
}
/* * * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apacheds.impl; import org.apache.directory.server.constants.ServerDNConstants; import org.apache.directory.server.core.DefaultDirectoryService; import org.apache.directory.server.core.DirectoryService; import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.factory.JdbmPartitionFactory; import org.apache.directory.server.core.factory.PartitionFactory; import org.apache.directory.server.core.partition.Partition; import org.apache.directory.server.core.partition.ldif.LdifPartition; import org.apache.directory.server.core.schema.SchemaPartition; import org.apache.directory.server.i18n.I18n; import org.apache.directory.shared.ldap.constants.SchemaConstants; import org.apache.directory.shared.ldap.schema.SchemaManager; import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor; import org.apache.directory.shared.ldap.schema.loader.ldif.LdifSchemaLoader; import org.apache.directory.shared.ldap.schema.manager.impl.DefaultSchemaManager; import org.apache.directory.shared.ldap.schema.registries.SchemaLoader; import org.apache.directory.shared.ldap.util.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wso2.carbon.ldap.server.exception.DirectoryServerException; import java.io.File; import 
java.util.List; class CarbonDirectoryServiceFactory implements DirectoryServiceFactory { /** * A logger for this class */ private static final Logger LOG = LoggerFactory.getLogger(CarbonDirectoryServiceFactory.class); /*Partition cache size is expressed as number of entries*/ private static final int PARTITION_CACHE_SIZE = 500; private static final int INDEX_CACHE_SIZE = 100; /** * The default factory returns stock instances of a apacheds service with smart defaults */ public static final DirectoryServiceFactory DEFAULT = new CarbonDirectoryServiceFactory(); /** * The apacheds service. */ private DirectoryService directoryService; /** * The partition factory. */ private PartitionFactory partitionFactory; private String schemaZipStore; /* default access */ @SuppressWarnings({"unchecked"}) CarbonDirectoryServiceFactory() { try { // creating the instance here so that // we we can set some properties like access control, anon access // before starting up the service directoryService = new DefaultDirectoryService(); } catch (Exception e) { String errorMessage = "Error in initializing the default directory service."; LOG.error(errorMessage); throw new RuntimeException(errorMessage, e); } try { String typeName = System.getProperty("apacheds.partition.factory"); if (typeName != null) { Class<? extends PartitionFactory> type = (Class<? extends PartitionFactory>) Class.forName(typeName); partitionFactory = type.newInstance(); } else { partitionFactory = new JdbmPartitionFactory(); } } catch (Exception e) { String errorMessage = "Error instantiating custom partition factory"; LOG.error(errorMessage, e); throw new RuntimeException(errorMessage, e); } } /** * {@inheritDoc} */ @Override public void init(String name) throws Exception { this.schemaZipStore = System.getProperty("schema.zip.store.location"); if (this.schemaZipStore == null) { throw new DirectoryServerException( "Schema Jar repository is not set. 
Please set schema.jar.location property " + "with proper schema storage"); } if (directoryService != null && directoryService.isStarted()) { return; } build(name); } /** * Build the working apacheds * * @param name Name of the working directory. */ private void buildWorkingDirectory(String name) { String workingDirectory = System.getProperty("workingDirectory"); if (workingDirectory == null) { workingDirectory = System.getProperty("java.io.tmpdir") + File.separator + "server-work-" + name; } directoryService.setWorkingDirectory(new File(workingDirectory)); } /** * Inits the schema and schema partition. * * @throws Exception If unable to extract schema files. */ private void initSchema() throws Exception { SchemaPartition schemaPartition = directoryService.getSchemaService().getSchemaPartition(); // Init the LdifPartition LdifPartition ldifPartition = new LdifPartition(); String workingDirectory = directoryService.getWorkingDirectory().getPath(); ldifPartition.setWorkingDirectory(workingDirectory + File.separator + "schema"); // Extract the schema on disk (a brand new one) and load the registries File schemaRepository = new File(workingDirectory, "schema"); if (!schemaRepository.exists()) { SchemaLdifExtractor extractor = new CarbonSchemaLdifExtractor(new File(workingDirectory), new File(this.schemaZipStore)); extractor.extractOrCopy(); } schemaPartition.setWrappedPartition(ldifPartition); SchemaLoader loader = new LdifSchemaLoader(schemaRepository); SchemaManager schemaManager = new DefaultSchemaManager(loader); directoryService.setSchemaManager(schemaManager); // We have to load the schema now, otherwise we won't be able // to initialize the Partitions, as we won't be able to parse // and normalize their suffix DN schemaManager.loadAllEnabled(); schemaPartition.setSchemaManager(schemaManager); List<Throwable> errors = schemaManager.getErrors(); if (!errors.isEmpty()) { throw new DirectoryServerException(I18n.err(I18n.ERR_317, ExceptionUtils.printErrors(errors))); 
} } /** * Inits the system partition. * * @throws Exception the exception */ private void initSystemPartition() throws Exception { // change the working apacheds to something that is unique // on the system and somewhere either under target apacheds // or somewhere in a temp area of the machine. // Inject the System Partition Partition systemPartition = partitionFactory.createPartition( "system", ServerDNConstants.SYSTEM_DN, PARTITION_CACHE_SIZE, new File(directoryService.getWorkingDirectory(), "system")); systemPartition.setSchemaManager(directoryService.getSchemaManager()); partitionFactory.addIndex(systemPartition, SchemaConstants.OBJECT_CLASS_AT, INDEX_CACHE_SIZE); directoryService.setSystemPartition(systemPartition); } /** * Builds the apacheds server instance. * * @param name the instance name * @throws Exception In case if unable to extract schema or if an error occurred when building * the working directory. */ private void build(String name) throws Exception { directoryService.setInstanceId(name); buildWorkingDirectory(name); // Init the service now initSchema(); initSystemPartition(); directoryService.startup(); } /** * {@inheritDoc} */ @Override public DirectoryService getDirectoryService() throws Exception { return directoryService; } /** * {@inheritDoc} */ @Override public PartitionFactory getPartitionFactory() throws Exception { return partitionFactory; } }
package org.unitime.timetable.test; import java.io.BufferedReader; import java.io.FileReader; import java.io.FileWriter; import java.text.DecimalFormat; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.Logger; import org.cpsolver.ifs.util.ToolBox; import org.dom4j.Document; import org.dom4j.DocumentHelper; import org.dom4j.Element; import org.dom4j.io.OutputFormat; import org.dom4j.io.XMLWriter; import org.unitime.commons.hibernate.util.HibernateUtil; import org.unitime.timetable.ApplicationProperties; import org.unitime.timetable.model.CourseOffering; import org.unitime.timetable.model.Session; import org.unitime.timetable.model.dao.SessionDAO; import org.unitime.timetable.onlinesectioning.AcademicSessionInfo; import org.unitime.timetable.onlinesectioning.custom.purdue.BannerTermProvider; public class DegreeWorksImportTest { protected static Logger sLog = Logger.getLogger(DegreeWorksImportTest.class); public static int guessEnrollmentFromLastLike(org.hibernate.Session hibSession, Session session, String area, String major, String classification) { return ((Number)hibSession.createQuery( "select count(distinct d.student) from LastLikeCourseDemand d inner join d.student.academicAreaClassifications aac " + "inner join d.student.posMajors m where d.subjectArea.session = :sessionId and " + "m.code=:major and aac.academicArea.academicAreaAbbreviation = :area and aac.academicClassification.code = :clasf") .setLong("sessionId", session.getUniqueId()) .setString("area", area) .setString("major", major) .setString("clasf", classification) .uniqueResult()).intValue(); } public static int guessEnrollmentFromReal(org.hibernate.Session hibSession, Session session, String area, String major, String classification) { return ((Number)hibSession.createQuery( "select 
count(distinct e.student) from StudentClassEnrollment e inner join e.student.academicAreaClassifications aac " + "inner join e.student.posMajors m where e.student.session = :sessionId and " + "m.code=:major and aac.academicArea.academicAreaAbbreviation = :area and aac.academicClassification.code = :clasf") .setLong("sessionId", session.getUniqueId()) .setString("area", area) .setString("major", major) .setString("clasf", classification) .uniqueResult()).intValue(); } public static int guessEnrollmentFromLastLike(org.hibernate.Session hibSession, CourseOffering co, String area, String major, String classification) { if (co.getPermId() == null) return ((Number)hibSession.createQuery( "select count(distinct d.student) from LastLikeCourseDemand d inner join d.student.academicAreaClassifications aac " + "inner join d.student.posMajors m where d.subjectArea.uniqueId = :subjectId and d.courseNbr = :courseNbr and "+ "m.code=:major and aac.academicArea.academicAreaAbbreviation = :area and aac.academicClassification.code = :clasf") .setLong("subjectId", co.getSubjectArea().getUniqueId()) .setString("courseNbr", co.getCourseNbr()) .setString("area", area) .setString("major", major) .setString("clasf", classification) .uniqueResult()).intValue(); else return ((Number)hibSession.createQuery( "select count(distinct d.student) from LastLikeCourseDemand d inner join d.student.academicAreaClassifications aac " + "inner join d.student.posMajors m where d.subjectArea.session.uniqueId = :subjectId and d.coursePermId = :permId and " + "m.code=:major and aac.academicArea.academicAreaAbbreviation = :area and aac.academicClassification.code = :clasf") .setLong("sessionId", co.getSubjectArea().getSessionId()) .setString("permId", co.getPermId()) .setString("area", area) .setString("major", major) .setString("clasf", classification) .uniqueResult()).intValue(); } public static int guessEnrollmentFromReal(org.hibernate.Session hibSession, CourseOffering co, String area, String major, String 
classification) { if (true) return 0; return ((Number)hibSession.createQuery( "select count(distinct e.student) from StudentClassEnrollment e inner join e.student.academicAreaClassifications aac " + "inner join e.student.posMajors m where e.courseOffering.uniqueId = :courseId and " + "m.code=:major and aac.academicArea.academicAreaAbbreviation = :area and aac.academicClassification.code = :clasf") .setLong("courseId", co.getUniqueId()) .setString("area", area) .setString("major", major) .setString("clasf", classification) .uniqueResult()).intValue(); } public static List<CourseOffering> getCourses(org.hibernate.Session hibSession, Session session, String subject, String courseNbr) { List<CourseOffering> courses = (List<CourseOffering>)hibSession.createQuery( "from CourseOffering co where co.subjectArea.session.uniqueId = :sessionId and co.subjectArea.subjectAreaAbbreviation = :subject " + "and co.courseNbr like :courseNbr || '%' order by co.courseNbr") .setLong("sessionId", session.getUniqueId()) .setString("subject", subject) .setString("courseNbr", courseNbr) .list(); // filter out not offered courses, if possible boolean hasOffered = false; for (Iterator<CourseOffering> i = courses.iterator(); i.hasNext(); ) { CourseOffering co = i.next(); if (!co.getInstructionalOffering().getNotOffered()) hasOffered = true; } if (hasOffered) for (Iterator<CourseOffering> i = courses.iterator(); i.hasNext(); ) { CourseOffering co = i.next(); if (co.getInstructionalOffering().getNotOffered()) i.remove(); } return courses; } public static void main(String[] args) { try { ToolBox.configureLogging(); HibernateUtil.configureHibernate(ApplicationProperties.getProperties()); final org.hibernate.Session hibSession = new SessionDAO().getSession(); Session session = Session.getSessionUsingInitiativeYearTerm( ApplicationProperties.getProperty("initiative", "PWL"), ApplicationProperties.getProperty("year","2015"), ApplicationProperties.getProperty("term","Spring") ); if (session==null) { 
sLog.error("Academic session not found, use properties initiative, year, and term to set academic session."); System.exit(0); } else { sLog.info("Session: "+session); } String line; int bannerTerm = Integer.parseInt(new BannerTermProvider().getExternalTerm(new AcademicSessionInfo(session))); NumberFormat clasfFormat = new DecimalFormat("00"); sLog.info("Banner term: " + bannerTerm); Map<String, String> test = new HashMap<String, String>(); Map<String, Integer> counts = new HashMap<String, Integer>(); BufferedReader in = new BufferedReader(new FileReader("/Users/muller/Test/DegreeWorks/Student_counts_201510.csv")); while ((line = in.readLine()) != null) { String[] data = line.split(","); if ("SGBSTDN_MAJR_CODE_1".equals(data[0])) continue; String major = data[0]; int term = Integer.parseInt(data[2]); if (term > bannerTerm) continue; int code = ("Fall".equals(session.getAcademicTerm()) ? 1 : 2); while (term <= bannerTerm - 100) { code += 2; term += 100; } String key = major + ":" + clasfFormat.format(code); Integer count = counts.get(key); counts.put(key, count == null ? 
Integer.parseInt(data[4]) : Integer.parseInt(data[4]) + count.intValue()); test.put(data[2], clasfFormat.format(code)); } in.close(); sLog.info("Mapping " + ToolBox.dict2string(test, 2)); in = new BufferedReader(new FileReader("/Users/muller/Test/DegreeWorks/template_data.txt")); List<Template> templates = new ArrayList<Template>(); Template template = null; Pattern termPattern = Pattern.compile(session.getAcademicTerm() + " Term ([1-9]+)"); while ((line = in.readLine()) != null) { String[] data = line.split("!"); String recordType = data[0].split(":")[0]; if ("MST".equals(recordType)) { template = new Template(data[0].split(":")[1]); templates.add(template); sLog.info("Processing template " + template.getId() + " (description: " + data[1] + ", active:" + data[2] + ")"); } else if ("TERM".equals(recordType)) { Matcher m = termPattern.matcher(data[3]); if (m.matches()) { int code = 2 * (Integer.parseInt(m.group(1)) - 1) + ("Fall".equals(session.getAcademicTerm()) ? 1 : 2); Classification c = new Classification(template, data[1], clasfFormat.format(code)); template.addClassification(c); sLog.info(" added classification " + c.getCode()); } } else if ("TAG".equals(recordType)) { String code = data[1]; String value = data[2]; if ("COLLEGE".equals(code)) { sLog.info(" -- academic area " + value); template.setArea(value); } if ("MAJOR".equals(code)) { sLog.info(" -- major " + value); template.setMajor(value); } } else if ("GROUPMST".equals(recordType)) { Classification c = template.getClassification(data[3]); if (c == null) continue; Group g = new Group(c, data[1], "R" + c.getCode(), "REQ"); c.addGroup(g); sLog.info(" select all of (" + c + ")"); } else if ("CHOICEGROUP".equals(recordType)) { Classification c = template.getClassification(data[3]); if (c == null) continue; Group g = new Group(c, data[1], "O" + c.getCode() + "-" + data[5], "OPT"); c.addGroup(g); sLog.info(" select one of (" + c + ")"); } else if ("CLASSGROUP".equals(recordType)) { Group g = 
template.getGroup(data[3]); if (g == null) continue; Group x = new Group(g.getClassification(), data[1], "X" + g.getClassification().getCode() + "-" + data[5], "REQ"); x.setParentId(g.getId()); g.getClassification().addGroup(x);; sLog.info(" all of (" + x.getClassification() + ")"); } else if ("CLASS".equals(recordType)) { Group g = template.getGroup(data[2]); if (g == null) continue; List<CourseOffering> courses = getCourses(hibSession, session, data[5], data[6]); if (courses.isEmpty()) { sLog.info("Course " + data[5] + " " + data[6] + " does not exist."); continue; } else { for (CourseOffering course: courses) if (course.getInstructionalOffering().getNotOffered()) sLog.info("Course " + course.getCourseNameWithTitle() + " is not offered."); } Course course = new Course(g, data[5], data[6]); g.addCourse(course); sLog.info(" " + course + " (" + g.getClassification() + ")"); } else if ("CLASSGRP".equals(recordType)) { Group g = template.getGroup(data[2]); if (g == null) continue; List<CourseOffering> courses = getCourses(hibSession, session, data[5], data[6]); if (courses.isEmpty()) { sLog.info("Course " + data[5] + " " + data[6] + " does not exist."); continue; } Course course = new Course(g, data[5], data[6]); g.addCourse(course); sLog.info(" " + course + " (" + g.getClassification() + ")"); } else if ("PLACEHOLDER".equals(recordType)) { Group g = template.getGroup(data[2]); if (g == null) continue; sLog.info(" placeholder " + data[3] + " (" + data[4] + ", " + g.getClassification() + ")"); } else if ("NONCOURSE".equals(recordType)) { } else if ("GPA".equals(recordType)) { } else if ("TEST".equals(recordType)) { } else { sLog.error(" not recognized " + recordType); } } in.close(); Document document = DocumentHelper.createDocument(); Element root = document.addElement("curricula"); root.addAttribute("campus", session.getAcademicInitiative()); root.addAttribute("term", session.getAcademicTerm()); root.addAttribute("year", session.getAcademicYear()); 
root.addAttribute("created", new Date().toString()); NumberFormat shareFormat = new DecimalFormat("0.000"); for (Template t: templates) { Element curriculum = root.addElement("curriculum"); curriculum.addElement("academicArea").addAttribute("abbreviation", t.getArea()); curriculum.addElement("major").addAttribute("code", t.getMajor()); curriculum.addAttribute("abbreviation", t.getArea() + "/" + t.getMajor()); for (Classification c: t.getClassifications()) { Element clasf = curriculum.addElement("classification"); clasf.addElement("academicClassification").addAttribute("code", c.getCode()); // int enrollment = guessEnrollmentFromReal(hibSession, session, t.getArea(), t.getMajor(), c.getCode()); Integer enrollment = counts.get(t.getMajor() + ":" + c.getCode()); if (enrollment == null) enrollment = 0; clasf.addAttribute("enrollment", String.valueOf(enrollment)); for (Group g: c.getGroups()) { int mIdx = 0; if (g.hasParentId()) continue; int total = 0; Map<Long, Integer> enrollments = new HashMap<Long, Integer>(); Map<String, Integer> grtot = new HashMap<String, Integer>(); if ("OPT".equals(g.getType())) { for (Course r: g.getCourses()) { List<CourseOffering> courses = getCourses(hibSession, session, r.getSubject(), r.getCourseNbr()); for (CourseOffering co: courses) { int e = guessEnrollmentFromReal(hibSession, co, t.getArea(), t.getMajor(), c.getCode()); total += e; enrollments.put(co.getUniqueId(), e); if (co.getCourseNbr().equals(r.getCourseNbr())) break; } } for (Group h: c.getChildGroups(g)) { int m = 0; for (Course r: h.getCourses()) { int tot = 0; List<CourseOffering> courses = getCourses(hibSession, session, r.getSubject(), r.getCourseNbr()); for (CourseOffering co: courses) { int e = guessEnrollmentFromReal(hibSession, co, t.getArea(), t.getMajor(), c.getCode()); tot += e; enrollments.put(co.getUniqueId(), e); if (co.getCourseNbr().equals(r.getCourseNbr())) break; } if (m < tot) m = tot; } grtot.put(h.getId(), m); total += m; } } int size = 
g.getCourses().size(); for (Group h: c.getChildGroups(g)) if (!h.getCourses().isEmpty()) size ++; for (Course r: g.getCourses()) { List<CourseOffering> courses = getCourses(hibSession, session, r.getSubject(), r.getCourseNbr()); if (courses.isEmpty()) { sLog.info("Course " + r + " does not exist."); Element ce = clasf.addElement("course").addAttribute("subject", r.getSubject()).addAttribute("courseNbr", r.getCourseNbr()); ce.addElement("group").addAttribute("id", g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); if ("REQ".equals(g.getType())) ce.addAttribute("share", "1.0"); else if (total > 0) ce.addAttribute("share", "0.0"); else ce.addAttribute("share", shareFormat.format(1.0 / size)); } else if (courses.size() == 1 || courses.get(0).getCourseNbr().equals(r.getCourseNbr())) { CourseOffering co = courses.get(0); Element ce = clasf.addElement("course").addAttribute("subject", co.getSubjectAreaAbbv()).addAttribute("courseNbr", co.getCourseNbr()); ce.addElement("group").addAttribute("id", g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); if ("REQ".equals(g.getType())) ce.addAttribute("share", "1.0"); else if (total > 0) { Integer e = enrollments.get(co.getUniqueId()); if (e == null) ce.addAttribute("share", "0.0"); else { ce.addAttribute("share", shareFormat.format(((double)e) / total)); ce.addAttribute("enrollment", e.toString()); } } else ce.addAttribute("share", shareFormat.format(1.0 / size)); } else { sLog.info("Course " + r + " has multiple matches."); mIdx ++; if ("REQ".equals(g.getType())) { total = 0; for (CourseOffering co: courses) { int e = guessEnrollmentFromReal(hibSession, co, t.getArea(), t.getMajor(), c.getCode()); total += e; enrollments.put(co.getUniqueId(), e); } } for (int i = 0; i < courses.size(); i++) { CourseOffering co = courses.get(i); sLog.info(" -- " + co.getCourseNameWithTitle()); Element ce = clasf.addElement("course").addAttribute("subject", 
co.getSubjectAreaAbbv()).addAttribute("courseNbr", co.getCourseNbr()); if (i == 0) ce.addElement("group").addAttribute("id", g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); ce.addElement("group").addAttribute("id", g.getId().trim() + "-" + mIdx).addAttribute("name", g.getName() + "_" + mIdx).addAttribute("type", "OPT"); if (total > 0) { Integer e = enrollments.get(co.getUniqueId()); if (e == null) ce.addAttribute("share", "0.0"); else { ce.addAttribute("share", shareFormat.format(((double)e) / total)); ce.addAttribute("enrollment", e.toString()); } } else if ("REQ".equals(g.getType())) ce.addAttribute("share", shareFormat.format(1.0 / courses.size())); else ce.addAttribute("share", shareFormat.format(1.0 / size / courses.size())); } } } for (Group h: c.getChildGroups(g)) { int tot = grtot.get(h.getId()); for (int j = 0; j < h.getCourses().size(); j++) { Course r = h.getCourses().get(j); List<CourseOffering> courses = getCourses(hibSession, session, r.getSubject(), r.getCourseNbr()); if (courses.isEmpty()) { sLog.info("Course " + r + " does not exist."); Element ce = clasf.addElement("course").addAttribute("subject", r.getSubject()).addAttribute("courseNbr", r.getCourseNbr()); if (j == 0) ce.addElement("group").addAttribute("id", g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); ce.addElement("group").addAttribute("id", h.getId().trim()).addAttribute("name", h.getName()).addAttribute("type", h.getType()); if (total > 0) ce.addAttribute("share", shareFormat.format(((double)tot) / total)); else ce.addAttribute("share", shareFormat.format(1.0 / size)); } else if (courses.size() == 1 || courses.get(0).getCourseNbr().equals(r.getCourseNbr())) { CourseOffering co = courses.get(0); Element ce = clasf.addElement("course").addAttribute("subject", co.getSubjectAreaAbbv()).addAttribute("courseNbr", co.getCourseNbr()); if (j == 0) ce.addElement("group").addAttribute("id", 
g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); ce.addElement("group").addAttribute("id", h.getId().trim()).addAttribute("name", h.getName()).addAttribute("type", h.getType()); if (total > 0) ce.addAttribute("share", shareFormat.format(((double)tot) / total)); else ce.addAttribute("share", shareFormat.format(1.0 / size)); } else { sLog.info("Course " + r + " has multiple matches."); mIdx ++; for (int i = 0; i < courses.size(); i++) { CourseOffering co = courses.get(i); sLog.info(" -- " + co.getCourseNameWithTitle()); Element ce = clasf.addElement("course").addAttribute("subject", co.getSubjectAreaAbbv()).addAttribute("courseNbr", co.getCourseNbr()); if (j == 0 && i == 0) ce.addElement("group").addAttribute("id", g.getId().trim()).addAttribute("name", g.getName()).addAttribute("type", g.getType()); if (i == 0) ce.addElement("group").addAttribute("id", h.getId().trim()).addAttribute("name", h.getName()).addAttribute("type", h.getType()); ce.addElement("group").addAttribute("id", h.getId().trim() + "-" + mIdx).addAttribute("name", h.getName() + "_" + mIdx).addAttribute("type", "OPT"); if (total > 0) { Integer e = enrollments.get(co.getUniqueId()); if (e == null) ce.addAttribute("share", "0.0"); else { ce.addAttribute("share", shareFormat.format(((double)e) / total)); ce.addAttribute("enrollment", e.toString()); } } else ce.addAttribute("share", shareFormat.format(1.0 / size / courses.size())); } } } } } } } FileWriter out = new FileWriter("/Users/muller/Test/DegreeWorks/curricula_" + bannerTerm + ".xml"); new XMLWriter(out, OutputFormat.createPrettyPrint()).write(document); out.flush(); out.close(); HibernateUtil.closeHibernate(); sLog.info("All done."); } catch (Exception e) { sLog.error("Test failed: " + e.getMessage(), e); } } static class Template { String iId, iArea, iMajor; List<Classification> iClassifications = new ArrayList<Classification>(); Template(String id) { iId = id; } String getId() { return iId; } String 
getArea() { return iArea; } void setArea(String area) { iArea = area; } String getMajor() { return iMajor; } void setMajor(String major) { iMajor = major; } void addClassification(Classification c) { iClassifications.add(c); } Classification getClassification(String id) { for (Classification c: iClassifications) if (c.getId().equals(id)) return c; return null; } List<Classification> getClassifications() { return iClassifications; } Group getGroup(String id) { for (Classification c: iClassifications) for (Group g: c.getGroups()) if (g.getId().equals(id)) return g; return null; } @Override public String toString() { return iArea + "/" + iMajor; } } static class Classification { Template iTemplate; String iId; String iCode; List<Group> iGroups = new ArrayList<Group>(); int iEnrollment = 0; Classification(Template template, String id, String code) { iTemplate = template; iId = id; iCode = code; } Template getTemplate() { return iTemplate; } String getId() { return iId; } String getCode() { return iCode; } void addGroup(Group c) { iGroups.add(c); } Group getGroup(String id) { for (Group c: iGroups) if (c.getId().equals(id)) return c; return null; } List<Group> getGroups() { return iGroups; } List<Group> getChildGroups(Group group) { List<Group> ret = new ArrayList<Group>(); for (Group g: iGroups) if (group.getId().equals(g.getParentId())) ret.add(g); return ret; } int getEnrollment() { return iEnrollment; } void setEnrollment(int enrollment) { iEnrollment = enrollment; } @Override public String toString() { return iCode; } } static class Group { Classification iClassification; String iId; String iName; String iType; String iParentId = null; List<Course> iCourses = new ArrayList<Course>(); Group(Classification classification, String id, String name, String type) { iClassification = classification; iId = id; iName = name; iType = type; } Classification getClassification() { return iClassification; } String getId() { return iId; } String getName() { return iName; } String 
getType() { return iType; } boolean hasParentId() { return iParentId != null; } String getParentId() { return iParentId; } void setParentId(String parentId) { iParentId = parentId; } void addCourse(Course course) { iCourses.add(course); } List<Course> getCourses() { return iCourses; } @Override public String toString() { return iName; } } static class Course { Group iGroup; String iSubject; String iCourseNbr; float iShare = 1.0f; Course(Group group, String subject, String courseNbr) { iGroup = group; iSubject = subject; iCourseNbr = courseNbr; } Group getGroup() { return iGroup; } String getSubject() { return iSubject; } String getCourseNbr() { return iCourseNbr; } void setShare(float share) { iShare = share; } float getShare() { return iShare; } @Override public String toString() { return getSubject() + " " + getCourseNbr(); } } }
package ch.ethz.inf.vs.californium.test.lockstep;

import static ch.ethz.inf.vs.californium.coap.CoAP.Code.GET;
import static ch.ethz.inf.vs.californium.coap.CoAP.ResponseCode.CONTENT;
import static ch.ethz.inf.vs.californium.coap.CoAP.Type.ACK;
import static ch.ethz.inf.vs.californium.coap.CoAP.Type.CON;
import static ch.ethz.inf.vs.californium.coap.CoAP.Type.NON;
import static ch.ethz.inf.vs.californium.coap.CoAP.Type.RST;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;

// NOTE(review): junit.framework.Assert is deprecated in JUnit 4 — org.junit.Assert
// is the replacement; left as-is here since this is a documentation-only pass.
import junit.framework.Assert;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import ch.ethz.inf.vs.californium.coap.CoAP.Type;
import ch.ethz.inf.vs.californium.coap.Response;
import ch.ethz.inf.vs.californium.network.config.NetworkConfig;
import ch.ethz.inf.vs.californium.network.config.NetworkConfigDefaults;
import ch.ethz.inf.vs.californium.server.Server;
import ch.ethz.inf.vs.californium.server.resources.CoapExchange;
import ch.ethz.inf.vs.californium.server.resources.ResourceBase;
import ch.ethz.inf.vs.californium.test.BlockwiseTransferTest.ServerBlockwiseInterceptor;
import ch.ethz.inf.vs.elements.UDPConnector;

/**
 * Server-side observe (RFC 7641 style) lockstep test.
 *
 * <p>A real Californium {@link Server} hosts a single observable resource
 * ("obs"); a {@code LockstepEndpoint} plays the client side message by
 * message, so every CoAP PDU the server emits is checked explicitly
 * (type, code, token, MID, Observe option, payload). The scenarios cover
 * relation establishment, notification retransmission/timeout, RST-based
 * cancellation, NON notifications, and rapid resource changes.
 *
 * <p>Ordering within each scenario is load-bearing: each
 * {@code testObsResource.change(...)} must be interleaved with the exact
 * expect/send sequence shown, and the {@code respType} field set *before*
 * the change controls the type (CON/NON) of the next notification.
 */
public class ObserveServerSide {

	// When true, notification payloads are random digits; the deterministic
	// branch in generatePayload() exists for reproducible debugging runs.
	private static boolean RANDOM_PAYLOAD_GENERATION = true;

	private Server server;
	private int serverPort;                 // ephemeral port the server bound to (resolved in setupServer)
	private int mid = 7000;                 // client-side message-ID counter; pre-incremented per request
	private TestObserveResource testObsResource;
	private String respPayload;             // payload the resource will serve on the next GET/notification
	private Type respType;                  // CON/NON type of the next response; null = let the stack decide
	private int timeout = 100;              // ACK_TIMEOUT in ms — kept tiny so retransmission tests run fast
	private ServerBlockwiseInterceptor serverInterceptor = new ServerBlockwiseInterceptor();

	/**
	 * Starts a fresh server on an ephemeral port with deterministic timing:
	 * ACK_RANDOM_FACTOR 1.0 and scale 1 make every retransmission fire at
	 * exactly {@code timeout} ms, which the lockstep expectations rely on.
	 * Block size 32 keeps blockwise payloads small for the block2 scenarios.
	 */
	@Before
	public void setupServer() {
		System.out.println("\nStart "+getClass().getSimpleName());
		// Silence the UDP connector's logger; its output would drown the lockstep trace.
		Logger ul = Logger.getLogger(UDPConnector.class.toString());
		ul.setLevel(Level.OFF);
		LockstepEndpoint.DEFAULT_VERBOSE = false;
		testObsResource = new TestObserveResource("obs");
		NetworkConfig config = new NetworkConfig()
			.setInt(NetworkConfigDefaults.ACK_TIMEOUT, timeout)
			.setFloat(NetworkConfigDefaults.ACK_RANDOM_FACTOR, 1.0f)
			.setInt(NetworkConfigDefaults.ACK_TIMEOUT_SCALE, 1)
			.setInt(NetworkConfigDefaults.MAX_MESSAGE_SIZE, 32)
			.setInt(NetworkConfigDefaults.DEFAULT_BLOCK_SIZE, 32);
		server = new Server(config, 0); // port 0 = pick any free port
		server.add(testObsResource);
		server.getEndpoints().get(0).addInterceptor(serverInterceptor);
		server.start();
		serverPort = server.getEndpoints().get(0).getAddress().getPort();
		System.out.println("Server binds to port "+serverPort);
	}

	/** Tears the server down after each test run. */
	@After
	public void shutdownServer() {
		System.out.println();
		server.destroy();
		System.out.println("End "+getClass().getSimpleName());
	}

	/**
	 * Single JUnit entry point running all scenarios sequentially on the
	 * same server instance (state such as {@code mid} carries across them).
	 * Failures are echoed before being rethrown so they appear next to the
	 * lockstep trace in the console.
	 */
	@Test
	public void test() throws Throwable {
		try {
			testEstablishmentAndTimeout();
			testEstablishmentAndTimeoutWithUpdateInMiddle();
			testEstablishmentAndRejectCancellation();
			// testObserveWithBlock(); // TODO
			testNON();
			testNONWithBlock();
			testQuickChangeAndTimeout();
		} catch (Exception e) {
			e.printStackTrace();
			throw e;
		} catch (Throwable t) {
			System.err.println(t);
			throw t;
		}
	}

	/**
	 * Establishes an observe relation, receives NON and CON notifications,
	 * then stops ACKing a CON notification: after MAX_RETRANSMIT lost
	 * retransmissions and the final timeout the server must cancel the
	 * relation (observer count drops to 0).
	 */
	private void testEstablishmentAndTimeout() throws Exception {
		System.out.println("Establish an observe relation. Cancellation after timeout");
		respPayload = generatePayload(30);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(CON, GET, tok, ++mid).path(path).observe(0).go();
		// checkObs/storeObserve track the Observe option value across
		// notifications under a symbolic key (semantics defined in LockstepEndpoint).
		client.expectResponse(ACK, CONTENT, tok, mid).storeObserve("Z").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification (NON — no ACK expected from the client)
		respType = NON;
		testObsResource.change("First notification");
		client.expectResponse().type(NON).code(CONTENT).token(tok).checkObs("Z", "A").payload(respPayload).go();

		// Second notification
		testObsResource.change("Second notification");
		client.expectResponse().type(NON).code(CONTENT).token(tok).checkObs("A", "B").payload(respPayload).go();

		// Third notification (CON — client ACKs it, relation stays alive)
		respType = CON;
		testObsResource.change("Third notification");
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("B", "C").payload(respPayload).go();
		client.sendEmpty(ACK).loadMID("MID").go();

		// Forth notification
		respType = NON;
		testObsResource.change("Forth notification");
		client.expectResponse().type(NON).code(CONTENT).token(tok).checkObs("C", "D").payload(respPayload).go();

		// Fifth notification: CON that the client never ACKs. Each expected
		// response below is a retransmission carrying the SAME MID and the
		// SAME Observe value "E" (loadMID/loadObserve assert equality).
		respType = CON;
		testObsResource.change("Fifth notification");
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("D", "E").payload(respPayload).go();
		serverInterceptor.log("// lost");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("E").payload(respPayload).go();
		serverInterceptor.log("// lost");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("E").payload(respPayload).go();
		serverInterceptor.log("// lost");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("E").payload(respPayload).go();
		serverInterceptor.log("// lost");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("E").payload(respPayload).go();
		serverInterceptor.log("// lost");

		// After the last retransmission times out the server must drop the observer.
		Thread.sleep(timeout+100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/**
	 * Like the timeout scenario, but the resource keeps changing while the
	 * CON notification is being retransmitted: each change replaces the
	 * pending retransmission with a fresh notification (new MID, new
	 * Observe value) instead of queueing behind it.
	 */
	private void testEstablishmentAndTimeoutWithUpdateInMiddle() throws Exception {
		System.out.println("Establish an observe relation. Cancellation after timeout. During the timeouts, the resource still changes.");
		respPayload = generatePayload(30);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(CON, GET, tok, ++mid).path(path).observe(0).go();
		client.expectResponse(ACK, CONTENT, tok, mid).storeObserve("A").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification: CON, never ACKed — first retransmission is lost too.
		respType = CON;
		testObsResource.change("First notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").payload(respPayload).go();
		serverInterceptor.log("// lost ");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("B").payload(respPayload).go();
		serverInterceptor.log("// lost (1. retransmission)");

		// Resource changes and sends next CON which will be transmitted after the former has timeouted
		testObsResource.change("Second notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("B", "C").payload(respPayload).go();
		serverInterceptor.log("// lost (2. retransmission)");

		// Resource changes. Even though the next notification is a NON it becomes
		// a CON because it replaces the retransmission of the former CON control notification
		respType = NON;
		testObsResource.change("Third notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("C", "D").payload(respPayload).go();
		serverInterceptor.log("// lost (3. retransmission)");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("D").payload(respPayload).go();
		serverInterceptor.log("// lost (4. retransmission)");

		Thread.sleep(timeout+100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/**
	 * Establishes a relation, then rejects a CON notification with an empty
	 * RST: the server must cancel the relation immediately (no timeout wait).
	 */
	private void testEstablishmentAndRejectCancellation() throws Exception {
		System.out.println("Establish an observe relation. Cancellation due to a reject from the client");
		respPayload = generatePayload(30);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(CON, GET, tok, ++mid).path(path).observe(0).go();
		client.expectResponse(ACK, CONTENT, tok, mid).storeObserve("A").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification: let the original be "lost", reject the retransmission.
		respType = CON;
		testObsResource.change("First notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").payload(respPayload).go();
		serverInterceptor.log("// lost ");
		client.expectResponse().type(CON).code(CONTENT).token(tok).loadMID("MID").loadObserve("B").payload(respPayload).go();

		System.out.println("Reject notification");
		client.sendEmpty(RST).loadMID("MID").go();
		Thread.sleep(100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/**
	 * Observe combined with blockwise (block2) transfer: the first block of
	 * each notification carries the Observe option; the remaining blocks are
	 * fetched with plain GETs using fresh tokens. Currently disabled in
	 * {@link #test()} (see the TODO there); the final observer-count assert
	 * is commented out as well.
	 */
	private void testObserveWithBlock() throws Exception {
		System.out.println("Observe with blockwise");
		respPayload = generatePayload(80); // 80 bytes -> 3 blocks of 32/32/16
		byte[] tok = generateNextToken();
		String path = "obs";

		// Establish relation; fetch blocks 1 and 2 with a second token.
		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(CON, GET, tok, ++mid).path(path).observe(0).go();
		client.expectResponse(ACK, CONTENT, tok, mid).storeObserve("A").block2(0, true, 32).payload(respPayload, 0, 32).go();
		byte[] tok2 = generateNextToken();
		client.sendRequest(CON, GET, tok2, ++mid).path(path).block2(1, false, 32).go();
		client.expectResponse(ACK, CONTENT, tok2, mid).block2(1, true, 32).payload(respPayload, 32, 64).go();
		client.sendRequest(CON, GET, tok2, ++mid).path(path).block2(2, false, 32).go();
		client.expectResponse(ACK, CONTENT, tok2, mid).block2(2, false, 32).payload(respPayload, 64, 80).go();

		// // First notification
		serverInterceptor.log("\n === changed ===");
		respType = CON;
		testObsResource.change(generatePayload(80));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").block2(0, true, 32).payload(respPayload, 0, 32).go();
		client.sendEmpty(ACK).loadMID("MID").go();

		Thread.sleep(100);
		// Resource changes again before the client fetched the remaining
		// blocks; the follow-up GETs therefore see the NEW representation.
		testObsResource.change(generatePayload(80));
		byte[] tok3 = generateNextToken();
		client.sendRequest(CON, GET, tok3, ++mid).path(path).block2(1, false, 32).go();
		client.expectResponse(ACK, CONTENT, tok3, mid).block2(1, true, 32).payload(respPayload, 32, 64).go();
		client.sendRequest(CON, GET, tok3, ++mid).path(path).block2(2, false, 32).go();
		client.expectResponse(ACK, CONTENT, tok3, mid).block2(2, false, 32).payload(respPayload, 64, 80).go();

		Thread.sleep(timeout+100);
		// Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/** Builds a lockstep client aimed at the server's localhost port. */
	private LockstepEndpoint createLockstepEndpoint() {
		try {
			LockstepEndpoint endpoint = new LockstepEndpoint();
			endpoint.setDestination(new InetSocketAddress(InetAddress.getByName("localhost"), serverPort));
			return endpoint;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/** Dumps and resets the interceptor's captured message trace. */
	private void printServerLog() {
		System.out.println(serverInterceptor.toString());
		serverInterceptor.clear();
	}

	// Monotonically increasing one-byte token source, shared by all scenarios
	// so tokens never collide within a run.
	private static int currentToken = 10;

	/** Returns a fresh single-byte token. */
	private static byte[] generateNextToken() {
		return b(++currentToken);
	}

	/** Packs the given ints into a byte array (each truncated to one byte). */
	private static byte[] b(int... is) {
		byte[] bytes = new byte[is.length];
		for (int i=0;i<bytes.length;i++)
			bytes[i] = (byte) is[i];
		return bytes;
	}

	/**
	 * Produces a digit string of exactly {@code length} characters, either
	 * random (default) or deterministic 1,2,3,... depending on
	 * {@link #RANDOM_PAYLOAD_GENERATION}.
	 */
	private static String generatePayload(int length) {
		StringBuffer buffer = new StringBuffer();
		if (RANDOM_PAYLOAD_GENERATION) {
			Random rand = new Random();
			while(buffer.length() < length) {
				buffer.append(rand.nextInt());
			}
		} else { // Deterministic payload
			int n = 1;
			while(buffer.length() < length) {
				buffer.append(n++);
			}
		}
		return buffer.substring(0, length);
	}

	// All tests are made with this resource
	private class TestObserveResource extends ResourceBase {

		public TestObserveResource(String name) {
			super(name);
			setObservable(true);
		}

		// Serves the enclosing test's current respPayload with the current
		// respType (null lets the stack pick the response type).
		public void handleGET(CoapExchange exchange) {
			Response response = new Response(CONTENT);
			response.setType(respType);
			response.setPayload(respPayload);
			exchange.respond(response);
		}

		// Updates the representation and fires a notification to all observers.
		public void change(String newPayload) {
			System.out.println("Resource changed: "+newPayload);
			respPayload = newPayload;
			changed();
		}
	}

	/**
	 * Establishes the relation with a NON request and exercises NON
	 * notifications, including one CON in the middle whose ACK gates the
	 * release of a postponed follow-up notification; finally cancels via RST.
	 */
	private void testNON() throws Exception {
		System.out.println("Establish an observe relation and receive NON notifications");
		respPayload = generatePayload(30);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(NON, GET, tok, ++mid).path(path).observe(0).go();
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeObserve("A").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification
		testObsResource.change("First notification "+generatePayload(10));
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").payload(respPayload).go();
		respType = CON;
		testObsResource.change("Second notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("B", "C").payload(respPayload).go();
		/* In transit */ {
			// Changing while the CON is still in transit: the server holds
			// the next notification back until the CON is acknowledged.
			respType = NON;
			testObsResource.change("Third notification "+generatePayload(10));
			// resource postpones third notification
		}
		client.sendEmpty(ACK).loadMID("MID").go();
		// resource releases third notification
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("C", "D").payload(respPayload).go();

		System.out.println("Reject notification");
		client.sendEmpty(RST).loadMID("MID").go();
		Thread.sleep(100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/**
	 * Same as {@link #testNON()} but the establishing request additionally
	 * carries an early-negotiation block2(0) option; payloads (30 bytes) fit
	 * in one block so no actual blockwise exchange follows.
	 */
	private void testNONWithBlock() throws Exception {
		System.out.println("Establish an observe relation and receive NON notifications");
		respPayload = generatePayload(30);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(NON, GET, tok, ++mid).path(path).observe(0).block2(0, false, 32).go();
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeObserve("A").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification
		testObsResource.change("First notification "+generatePayload(10));
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").payload(respPayload).go();
		respType = CON;
		testObsResource.change("Second notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).storeMID("MID").checkObs("B", "C").payload(respPayload).go();
		/* In transit */ {
			respType = NON;
			testObsResource.change("Third notification "+generatePayload(10));
			// resource postpones third notification
		}
		client.sendEmpty(ACK).loadMID("MID").go();
		// resource releases third notification
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("C", "D").payload(respPayload).go();

		testObsResource.change("Fourth notification "+generatePayload(10));
		// NOTE(review): this second checkObs reuses the keys ("C", "D") —
		// it still passes because "C" holds the pre-third-notification value,
		// but ("D", "E") would express the intent more clearly; confirm.
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("C", "D").payload(respPayload).go();

		System.out.println("Reject notification");
		client.sendEmpty(RST).loadMID("MID").go();
		Thread.sleep(100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}

	/**
	 * Client "crashes" (stops responding) while the resource changes
	 * rapidly. Each change replaces the unacknowledged CON in flight, so
	 * every expected message reuses the Observe bookkeeping key "B"; after
	 * the retransmission budget is exhausted the server cancels the relation.
	 */
	private void testQuickChangeAndTimeout() throws Exception {
		System.out.println("Establish an observe relation to a quickly changing resource and do no longer respond");
		respPayload = generatePayload(20);
		byte[] tok = generateNextToken();
		String path = "obs";

		LockstepEndpoint client = createLockstepEndpoint();
		respType = null;
		client.sendRequest(CON, GET, tok, ++mid).path(path).observe(0).go();
		client.expectResponse(ACK, CONTENT, tok, mid).storeObserve("A").payload(respPayload).go();
		Assert.assertEquals("Resource has established relation:", 1, testObsResource.getObserverCount());
		serverInterceptor.log("\nObserve relation established");

		// First notification
		testObsResource.change("First notification "+generatePayload(10));
		client.expectResponse().type(NON).code(CONTENT).token(tok).storeMID("MID").checkObs("A", "B").payload(respPayload).go();

		// Now client crashes and no longer responds
		respType = CON;
		testObsResource.change("Second notification "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).checkObs("B", "C").payload(respPayload).go();
		respType = NON;
		testObsResource.change("NON notification 1 "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).checkObs("B", "B").payload(respPayload).go();
		testObsResource.change("NON notification 2 "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).checkObs("B", "B").payload(respPayload).go();
		testObsResource.change("NON notification 3 "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).checkObs("B", "B").payload(respPayload).go();
		testObsResource.change("NON notification 4 "+generatePayload(10));
		client.expectResponse().type(CON).code(CONTENT).token(tok).checkObs("B", "B").payload(respPayload).go();
		serverInterceptor.log("\n server cancels the relation");

		Thread.sleep(timeout+100);
		Assert.assertEquals("Resource has not removed relation:", 0, testObsResource.getObserverCount());
		printServerLog();
	}
}
/*
 * Copyright 2015 John Ahlroos
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.util;

import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.*;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Widget;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.VDDAbstractDropHandler;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.VDDHasDropHandler;
import com.vaadin.client.BrowserInfo;
import com.vaadin.client.ComponentConnector;
import com.vaadin.client.Util;
import com.vaadin.client.ui.dd.VDragAndDropManager;
import com.vaadin.client.ui.dd.VDragEvent;
import com.vaadin.client.ui.dd.VTransferable;

import java.util.ArrayList;
import java.util.List;

/**
 * Provides HTML5 drops for any connector.
 *
 * Bridges the browser's native HTML5 drag events (dragenter / dragover / drop)
 * on a connector's widget into Vaadin's client-side drag-and-drop manager, so
 * that data dragged from outside the application (e.g. text) can be dropped
 * onto a drag-and-drop layout.
 *
 * @author John Ahlroos / www.jasoft.fi
 */
public class HTML5Support {

    // Haulmont API: optional application-wide hooks. When set, they are
    // notified in addition to the per-connector drop handler each time the
    // corresponding native event is accepted.
    protected static DragOverHandler globalDragOverHandler = null;

    protected static DropHandler globalDropHandler = null;

    protected static DragEnterHandler globalDragEnterHandler = null;

    // DOM handler registrations added by enable(); removed again by disable().
    private final List<HandlerRegistration> handlers = new ArrayList<HandlerRegistration>();

    /**
     * Translates native HTML5 drag events for one connector into Vaadin drag
     * events routed to the connector's {@link VDDAbstractDropHandler}.
     */
    public static class HTML5DragHandler
            implements DragEnterHandler, DragOverHandler, DropHandler {

        // The Vaadin drag started in onDragEnter; null while no HTML5 drag is
        // in progress (also reset after a completed drop or an interrupt).
        private VDragEvent vaadinDragEvent;

        private ComponentConnector connector;

        private VDDAbstractDropHandler<? extends Widget> dropHandler;

        public HTML5DragHandler(ComponentConnector connector,
                VDDAbstractDropHandler<? extends Widget> handler) {
            this.connector = connector;
            this.dropHandler = handler;
        }

        /**
         * Completes an in-progress drag: extracts the dragged text payload,
         * stores it on the transferable as "html5Data" and ends the Vaadin
         * drag. Only fires when the event target validates and a drag was
         * previously started by {@link #onDragEnter(DragEnterEvent)}.
         */
        @Override
        public void onDrop(DropEvent event) {
            NativeEvent nativeEvent = event.getNativeEvent();
            if (validate(nativeEvent) && vaadinDragEvent != null) {
                nativeEvent.preventDefault();
                nativeEvent.stopPropagation();

                // event stopped, just notify global handler
                // Haulmont API
                if (globalDropHandler != null) {
                    globalDropHandler.onDrop(event);
                }

                vaadinDragEvent.setCurrentGwtEvent(nativeEvent);
                VDragAndDropManager.get().setCurrentDropHandler(dropHandler);

                // FIXME only text currently supported
                String data;
                if (BrowserInfo.get().isIE()) {
                    // IE does not support MIME types
                    // http://www.developerfusion.com/article/144828/the-html5-drag-and-drop-api/
                    data = event.getData("text");
                } else {
                    data = event.getData("text/plain");
                }

                vaadinDragEvent.getTransferable().setData("html5Data", data);
                VDragAndDropManager.get().endDrag();
                vaadinDragEvent = null;
            }
        }

        /**
         * Keeps the Vaadin drag alive while the pointer moves over the widget,
         * forwarding the event to the connector's drop handler. Must call
         * preventDefault() so the browser allows a subsequent drop.
         */
        @Override
        public void onDragOver(DragOverEvent event) {
            NativeEvent nativeEvent = event.getNativeEvent();
            if (validate(nativeEvent) && vaadinDragEvent != null) {
                nativeEvent.preventDefault();
                nativeEvent.stopPropagation();

                // event stopped, just notify global handler
                // Haulmont API
                if (globalDragOverHandler != null) {
                    globalDragOverHandler.onDragOver(event);
                }

                vaadinDragEvent.setCurrentGwtEvent(nativeEvent);
                VDragAndDropManager.get().setCurrentDropHandler(dropHandler);
                dropHandler.dragOver(vaadinDragEvent);
            }
        }

        /**
         * Starts a Vaadin drag when the native drag enters a valid target;
         * otherwise, if a drag is already in progress, interrupts it. The
         * global dragenter hook (Haulmont API) is notified in both cases.
         */
        @Override
        public void onDragEnter(DragEnterEvent event) {
            NativeEvent nativeEvent = event.getNativeEvent();
            if (validate(nativeEvent)) {
                VTransferable transferable = new VTransferable();
                transferable.setDragSource(connector);
                vaadinDragEvent = VDragAndDropManager.get()
                        .startDrag(transferable, event.getNativeEvent(), false);
                vaadinDragEvent.setCurrentGwtEvent(nativeEvent);
                VDragAndDropManager.get().setCurrentDropHandler(dropHandler);
                dropHandler.dragEnter(vaadinDragEvent);
                nativeEvent.preventDefault();
                nativeEvent.stopPropagation();
            } else if (vaadinDragEvent != null
                    && Element.is(nativeEvent.getEventTarget())) {
                vaadinDragEvent.setCurrentGwtEvent(nativeEvent);
                VDragAndDropManager.get().setCurrentDropHandler(null);
                VDragAndDropManager.get().interruptDrag();
                vaadinDragEvent = null;
                nativeEvent.preventDefault();
                nativeEvent.stopPropagation();
            }

            if (globalDragEnterHandler != null) {
                globalDragEnterHandler.onDragEnter(event);
            }
        }

        /**
         * Decides whether this handler should react to the event: walks up
         * from the event target to the nearest widget with a connector, then
         * accepts the event if that connector is this handler's connector, or
         * a child that does not manage its own drops.
         */
        private boolean validate(NativeEvent event) {
            if (!Element.is(event.getEventTarget())) {
                return false;
            }

            Element target = Element.as(event.getEventTarget());
            Widget widget = Util.findWidget(target, null);
            if (widget == null) {
                return false;
            }

            ComponentConnector connector = Util.findConnectorFor(widget);
            // Climb the widget tree until an owning connector is found.
            // NOTE(review): if no ancestor has a connector, widget.getParent()
            // eventually returns null and findConnectorFor would be called with
            // null — confirm this cannot happen for events delivered here.
            while (connector == null) {
                widget = widget.getParent();
                connector = Util.findConnectorFor(widget);
            }

            if (this.connector == connector) {
                return true;
            } else if (connector == null) {
                // NOTE(review): unreachable — the loop above only exits when
                // connector != null. Kept as-is; candidate for cleanup.
                return false;
            } else if (connector.getWidget() instanceof VDDHasDropHandler) {
                // Child connector handles its own drops
                return false;
            }
            // Over non droppable child
            return true;
        }
    }

    /**
     * Attaches HTML5 drag/drop DOM handlers for the given connector's widget.
     *
     * @param connector the connector whose widget should accept HTML5 drops
     * @param handler the drop handler to route drag events to; when null no
     *            support object is created
     * @return the created support instance, or null when handler is null
     */
    public static final HTML5Support enable(final ComponentConnector connector,
            final VDDAbstractDropHandler<? extends Widget> handler) {
        if (handler == null) {
            return null;
        }
        Widget w = connector.getWidget();
        final HTML5Support support = GWT.create(HTML5Support.class);
        final HTML5DragHandler dragHandler = new HTML5DragHandler(connector,
                handler);
        support.handlers
                .add(w.addDomHandler(dragHandler, DragEnterEvent.getType()));
        support.handlers
                .add(w.addDomHandler(dragHandler, DragOverEvent.getType()));
        support.handlers.add(w.addDomHandler(dragHandler, DropEvent.getType()));
        return support;
    }

    private HTML5Support() {
        // Factory
    }

    /**
     * Detaches all DOM handlers registered by {@link #enable}.
     */
    public void disable() {
        for (HandlerRegistration handlerRegistration : handlers) {
            handlerRegistration.removeHandler();
        }
        handlers.clear();
    }

    // Haulmont API
    public static DragOverHandler getGlobalDragOverHandler() {
        return globalDragOverHandler;
    }

    // Haulmont API
    public static void setGlobalDragOverHandler(DragOverHandler globalDragOverHandler) {
        HTML5Support.globalDragOverHandler = globalDragOverHandler;
    }

    // Haulmont API
    public static DropHandler getGlobalDropHandler() {
        return globalDropHandler;
    }

    // Haulmont API
    public static void setGlobalDropHandler(DropHandler globalDropHandler) {
        HTML5Support.globalDropHandler = globalDropHandler;
    }

    // Haulmont API
    public static DragEnterHandler getGlobalDragEnterHandler() {
        return globalDragEnterHandler;
    }

    // Haulmont API
    public static void setGlobalDragEnterHandler(DragEnterHandler globalDragEnterHandler) {
        HTML5Support.globalDragEnterHandler = globalDragEnterHandler;
    }
}
package eu.uqasar.web.dashboard.widget.tech_debt;

/*
 * #%L
 * U-QASAR
 * %%
 * Copyright (C) 2012 - 2015 U-QASAR Consortium
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import javax.naming.InitialContext;
import javax.naming.NamingException;

import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.Model;

import ro.fortsoft.wicket.dashboard.AbstractWidget;
import ro.fortsoft.wicket.dashboard.Widget;
import ro.fortsoft.wicket.dashboard.web.WidgetView;

import com.googlecode.wickedcharts.highcharts.options.Axis;
import com.googlecode.wickedcharts.highcharts.options.ChartOptions;
import com.googlecode.wickedcharts.highcharts.options.CreditOptions;
import com.googlecode.wickedcharts.highcharts.options.Cursor;
import com.googlecode.wickedcharts.highcharts.options.DataLabels;
import com.googlecode.wickedcharts.highcharts.options.Function;
import com.googlecode.wickedcharts.highcharts.options.Global;
import com.googlecode.wickedcharts.highcharts.options.HorizontalAlignment;
import com.googlecode.wickedcharts.highcharts.options.Labels;
import com.googlecode.wickedcharts.highcharts.options.Legend;
import com.googlecode.wickedcharts.highcharts.options.LegendLayout;
import com.googlecode.wickedcharts.highcharts.options.Options;
import com.googlecode.wickedcharts.highcharts.options.Overflow;
import com.googlecode.wickedcharts.highcharts.options.PlotOptions;
import com.googlecode.wickedcharts.highcharts.options.PlotOptionsChoice;
import com.googlecode.wickedcharts.highcharts.options.SeriesType;
import com.googlecode.wickedcharts.highcharts.options.Title;
import com.googlecode.wickedcharts.highcharts.options.Tooltip;
import com.googlecode.wickedcharts.highcharts.options.VerticalAlignment;
import com.googlecode.wickedcharts.highcharts.options.color.HexColor;
import com.googlecode.wickedcharts.highcharts.options.color.NullColor;
import com.googlecode.wickedcharts.highcharts.options.functions.PercentageAndValueFormatter;
import com.googlecode.wickedcharts.highcharts.options.functions.PercentageFormatter;
import com.googlecode.wickedcharts.highcharts.options.series.Point;
import com.googlecode.wickedcharts.highcharts.options.series.PointSeries;
import com.googlecode.wickedcharts.highcharts.options.series.SimpleSeries;

import eu.uqasar.model.measure.JiraMetricMeasurement;
import eu.uqasar.model.tree.Project;
import eu.uqasar.service.tree.TreeNodeService;

/**
 * Dashboard widget that renders a "Technical Debt Overview" chart (bar or pie)
 * from the JIRA metric measurements of a project.
 *
 * <p>The chart type is stored in the widget settings under {@code "chartType"}
 * ({@link #BAR_TYPE} or {@link #PIE_TYPE}); the project name under
 * {@code "project"}, falling back to "U-QASAR Platform Development".</p>
 */
public class TechDebtChartWidget extends AbstractWidget {

    private static final long serialVersionUID = 376923874435436400L;

    private static final String BAR_TYPE = "BAR";
    private static final String PIE_TYPE = "PIE";
    public static final List<String> TYPES = Arrays.asList(PIE_TYPE, BAR_TYPE);

    // Factory used to build the serialized chart data; injected statically.
    private static TechDebtChartFactory chartDataFactory;

    public TechDebtChartWidget() {
        super();
        title = "Technical Debt Overview";
    }

    @Override
    public void init() {
        // Default to the bar chart when no type has been configured yet.
        if (!settings.containsKey("chartType")) {
            settings.put("chartType", TechDebtChartWidget.BAR_TYPE);
        }
    }

    public static TechDebtChartFactory getChartDataFactory() {
        return chartDataFactory;
    }

    public static void setChartDataFactory(TechDebtChartFactory chartDataFactory) {
        TechDebtChartWidget.chartDataFactory = chartDataFactory;
    }

    /**
     * Delegates chart rendering to the configured factory.
     *
     * @return the chart data produced by the factory
     * @throws IllegalStateException when no factory has been set via
     *             {@link #setChartDataFactory(TechDebtChartFactory)}
     */
    public String getChartData() {
        if (chartDataFactory == null) {
            // IllegalStateException (a RuntimeException) keeps callers'
            // catch blocks working while naming the precise failure.
            throw new IllegalStateException(
                    "ChartDataFactory cannot be null. Use TechDebtChartWidget.setChartDataFactory(...)");
        }
        return chartDataFactory.createChart(this);
    }

    @Override
    public WidgetView createView(String viewId) {
        return new TechDebtChartWidgetView(viewId, new Model<Widget>(this));
    }

    @Override
    public boolean hasSettings() {
        return true;
    }

    @Override
    public Panel createSettingsPanel(String settingsPanelId) {
        return new TechDebtChartSettingsPanel(settingsPanelId, new Model<>(this));
    }

    /**
     * Builds the Highcharts options for the technical debt chart.
     *
     * @return bar or pie chart options depending on the "chartType" setting;
     *         an empty {@link Options} when the project cannot be resolved
     */
    public Options getOptions() {
        Project project = lookUpProject();
        if (project == null) {
            return new Options();
        }

        int[] counts = countIssueMetrics(project);
        int met1 = counts[0]; // UNRESOLVED_TASK_ISSUES_PER_PROJECT
        int met2 = counts[1]; // UNRESOLVED_ISSUES_PER_PROJECT
        int met3 = counts[2]; // FIXED_ISSUES_PER_PROJECT

        if (resolveSeriesType().equals(SeriesType.BAR)) {
            return buildBarOptions(met1, met2, met3);
        }
        return buildPieOptions(met1, met2, met3);
    }

    /**
     * Resolves the project from the widget settings, falling back to the
     * default project name. Returns null when the JNDI lookup fails.
     */
    private Project lookUpProject() {
        try {
            InitialContext ic = new InitialContext();
            TreeNodeService dataService =
                    (TreeNodeService) ic.lookup("java:module/TreeNodeService");
            // Obtain project from the settings
            if (settings.get("project") != null) {
                return dataService.getProjectByName(settings.get("project"));
            }
            if (dataService != null) {
                return dataService.getProjectByName("U-QASAR Platform Development");
            }
        } catch (NamingException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Maps the configured "chartType" setting onto a {@link SeriesType},
     * defaulting to {@link SeriesType#BAR}.
     *
     * <p>Fix: the previous {@code SeriesType.valueOf(...) != null} check could
     * never work — {@code valueOf} throws {@link IllegalArgumentException} on
     * unknown names (and NPEs on null) rather than returning null.</p>
     */
    private SeriesType resolveSeriesType() {
        String configured = settings.get("chartType");
        if (configured != null) {
            try {
                return SeriesType.valueOf(configured);
            } catch (IllegalArgumentException ignored) {
                // Unknown chart type stored in the settings; use the default.
            }
        }
        return SeriesType.BAR;
    }

    /**
     * Counts the JIRA measurements of the project per metric.
     *
     * @return {unresolvedTaskIssues, unresolvedIssues, fixedIssues}
     */
    private int[] countIssueMetrics(Project project) {
        int unresolvedTaskIssues = 0;
        int unresolvedIssues = 0;
        int fixedIssues = 0;
        Iterator itr = project.getJirameasurements().iterator();
        while (itr.hasNext()) {
            JiraMetricMeasurement jmeas = (JiraMetricMeasurement) itr.next();
            String metric = jmeas.getJiraMetric();
            if (metric.equalsIgnoreCase("UNRESOLVED_TASK_ISSUES_PER_PROJECT")) {
                unresolvedTaskIssues++;
            }
            if (metric.equalsIgnoreCase("UNRESOLVED_ISSUES_PER_PROJECT")) {
                unresolvedIssues++;
            }
            if (metric.equalsIgnoreCase("FIXED_ISSUES_PER_PROJECT")) {
                fixedIssues++;
            }
        }
        return new int[] { unresolvedTaskIssues, unresolvedIssues, fixedIssues };
    }

    /**
     * Builds the horizontal bar chart showing absolute issue counts.
     *
     * NOTE(review): met1/met2/met3 are counted from the UNRESOLVED_TASK /
     * UNRESOLVED / FIXED metrics but the series are labelled "Blocking" /
     * "Critical" / "Resolved" — confirm the intended mapping.
     */
    private Options buildBarOptions(int met1, int met2, int met3) {
        Options barOptions = new Options();
        barOptions.setChartOptions(new ChartOptions().setType(SeriesType.BAR));
        barOptions.setGlobal(new Global().setUseUTC(Boolean.TRUE));
        barOptions.setTitle(new Title("Technical Debt Overview - Absolute View"));

        List<String> barCategories = new ArrayList<>();
        barCategories.add("Category of Issues");
        barOptions.setxAxis(new Axis().setCategories(barCategories).setTitle(
                new Title(null)));
        barOptions.setyAxis(new Axis().setTitle(
                new Title("Number of issues")
                        .setAlign(HorizontalAlignment.HIGH)).setLabels(
                new Labels().setOverflow(Overflow.JUSTIFY)));
        barOptions.setTooltip(new Tooltip().setFormatter(new Function(
                "return ''+this.series.name +': '+ this.y;")));
        barOptions.setPlotOptions(new PlotOptionsChoice()
                .setBar(new PlotOptions().setDataLabels(new DataLabels()
                        .setEnabled(Boolean.TRUE))));
        barOptions.setLegend(new Legend().setLayout(LegendLayout.VERTICAL)
                .setAlign(HorizontalAlignment.RIGHT)
                .setVerticalAlign(VerticalAlignment.TOP).setX(-50).setY(180)
                .setFloating(Boolean.TRUE).setBorderWidth(1)
                .setBackgroundColor(new HexColor("#ffffff"))
                .setShadow(Boolean.TRUE));
        barOptions.setCredits(new CreditOptions().setEnabled(Boolean.FALSE));

        barOptions.addSeries(new SimpleSeries().setName("Blocking Issues")
                .setData(met1));
        barOptions.addSeries(new SimpleSeries().setName("Critical Issues")
                .setData(met2));
        barOptions.addSeries(new SimpleSeries().setName("Resolved Issues")
                .setData(met3));
        return barOptions;
    }

    /**
     * Builds the pie chart showing the normalized issue distribution.
     */
    private Options buildPieOptions(int met1, int met2, int met3) {
        Options pieOptions = new Options();
        pieOptions.setChartOptions(new ChartOptions()
                .setPlotBackgroundColor(new NullColor())
                .setPlotBorderWidth(null).setPlotShadow(Boolean.FALSE));
        pieOptions.setTitle(new Title(
                "Technical Debt Overview - Normalized View"));
        pieOptions.setTooltip(new Tooltip().setFormatter(
                new PercentageFormatter()).setPercentageDecimals(1));
        pieOptions.setPlotOptions(new PlotOptionsChoice()
                .setPie(new PlotOptions()
                        .setAllowPointSelect(Boolean.TRUE)
                        .setCursor(Cursor.POINTER)
                        .setShowInLegend(Boolean.TRUE)
                        .setDataLabels(new DataLabels()
                                .setEnabled(Boolean.TRUE)
                                .setColor(new HexColor("#000000"))
                                .setConnectorColor(new HexColor("#000000"))
                                .setFormatter(
                                        new PercentageAndValueFormatter()))));

        pieOptions.addSeries(new PointSeries()
                .setType(SeriesType.PIE)
                .setName("Issues")
                .addPoint(new Point("Blocking Issues", met1))
                .addPoint(new Point("Critical Issues", met2))
                .addPoint(new Point("Resolved Issues", met3).setSliced(
                        Boolean.TRUE).setSelected(Boolean.TRUE)));
        return pieOptions;
    }
}
/*
 * Copyright (c) 2003-2011, Simon Brown
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *
 *   - Neither the name of Pebble nor the names of its contributors may
 *     be used to endorse or promote products derived from this software
 *     without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package net.sourceforge.pebble.domain;

import net.sourceforge.pebble.Constants;
import net.sourceforge.pebble.PebbleContext;
import net.sourceforge.pebble.event.DefaultEventDispatcher;
import net.sourceforge.pebble.api.event.blog.BlogEvent;
import net.sourceforge.pebble.api.event.blog.BlogListener;
import net.sourceforge.pebble.logging.CombinedLogFormatLogger;
import net.sourceforge.pebble.permalink.DefaultPermalinkProvider;

import java.io.File;
import java.util.*;

/**
 * Tests for the Blog class.
 *
 * JUnit 3 style test case; the {@code blog} fixture and
 * {@code TEST_BLOG_LOCATION} come from the {@code SingleBlogTestCase}
 * superclass.
 *
 * @author    Simon Brown
 */
public class BlogTest extends SingleBlogTestCase {

  // Verifies the defaults a freshly created blog is configured with.
  public void testConstructionOfDefaultInstance() {
    assertEquals(new File(TEST_BLOG_LOCATION, "blogs/default").getAbsolutePath(), blog.getRoot());
    assertNull(blog.getBlog());
    assertEquals("My blog", blog.getName());
    assertEquals("", blog.getDescription());
    assertEquals("Blog Owner", blog.getAuthor());
    assertEquals("blog@yourdomain.com", blog.getEmail());
    assertEquals(TimeZone.getTimeZone("Europe/London"), blog.getTimeZone());
    assertEquals("en", blog.getLanguage());
    assertEquals("GB", blog.getCountry());
    assertEquals("UTF-8", blog.getCharacterEncoding());
    assertEquals(3, blog.getRecentBlogEntriesOnHomePage());
    assertEquals(3, blog.getRecentResponsesOnHomePage());
    assertTrue(blog.isPublic());
    assertFalse(blog.isPrivate());
    assertEquals("net.sourceforge.pebble.permalink.DefaultPermalinkProvider", blog.getPermalinkProviderName());
    assertTrue(blog.getPermalinkProvider() instanceof DefaultPermalinkProvider);
    assertEquals("net.sourceforge.pebble.event.DefaultEventDispatcher", blog.getEventDispatcherName());
    assertTrue(blog.getEventDispatcher() instanceof DefaultEventDispatcher);
    assertEquals("net.sourceforge.pebble.logging.CombinedLogFormatLogger", blog.getLoggerName());
    assertTrue(blog.getLogger() instanceof CombinedLogFormatLogger);
  }

  /**
   * Tests that we can get a specific property.
   */
  public void testGetProperty() {
    assertEquals(blog.getName(), blog.getProperty(Blog.NAME_KEY));
  }

  /**
   * Tests that we can get a specific property.
   */
  public void testGetProperties() {
    Properties props = blog.getProperties();
    assertNotNull(props);
    assertEquals(blog.getName(), props.getProperty(Blog.NAME_KEY));
  }

  /**
   * Tests that we can set a specific property.
   */
  public void testSetProperty() {
    blog.setProperty(Blog.NAME_KEY, "New name");
    assertEquals("New name", blog.getProperty(Blog.NAME_KEY));
    assertEquals("New name", blog.getName());

    // and a new property
    blog.setProperty("aNewPropertyKey", "A new property value");
    assertEquals("A new property value", blog.getProperty("aNewPropertyKey"));
  }

  /**
   * Tests that we can store properties.
   * NOTE(review): this test is commented out in the original source —
   * left untouched here.
  public void testStoreProperties() {
    blog.setProperty("aNewPropertyKey", "A new property value");
    try {
      blog.storeProperties();

      blog = new Blog(TEST_BLOG_LOCATION.getAbsolutePath());
      assertEquals("A new property value", blog.getProperty("aNewPropertyKey"));

      // and clean up
      blog.removeProperty("aNewPropertyKey");
      blog.storeProperties();
    } catch (BlogServiceException e) {
      fail();
    }
  }
   */

  /**
   * Tests that we can remove a specific property.
   */
  public void testRemoveProperty() {
    blog.setProperty("aNewPropertyKey", "A new property value");
    assertEquals("A new property value", blog.getProperty("aNewPropertyKey"));
    blog.removeProperty("aNewPropertyKey");
    assertNull(blog.getProperty("aNewPropertyKey"));
  }

  /**
   * Tests that the correct calendar (with timezone) is created.
   */
  public void testCalendar() {
    Calendar cal = blog.getCalendar();
    assertEquals(blog.getTimeZone(), cal.getTimeZone());
  }

  /**
   * Tests that we can get a specific Year instance.
   */
  public void testGetBlogForYear() {
    Calendar cal = blog.getCalendar();
    Year year = blog.getBlogForYear(cal.get(Calendar.YEAR));
    assertNotNull(year);
    assertEquals(cal.get(Calendar.YEAR), year.getYear());
  }

  /**
   * Tests that we can get a previous Year instance.
   */
  public void testGetBlogForPreviousYear() {
    Calendar cal = blog.getCalendar();
    Year year = blog.getBlogForYear(cal.get(Calendar.YEAR));
    year = blog.getBlogForPreviousYear(year);
    assertNotNull(year);
    assertEquals(cal.get(Calendar.YEAR)-1, year.getYear());
  }

  /**
   * Tests that we can get a next Year instance.
   */
  public void testGetBlogForNextYear() {
    Calendar cal = blog.getCalendar();
    Year year = blog.getBlogForYear(cal.get(Calendar.YEAR));
    year = blog.getBlogForNextYear(year);
    assertNotNull(year);
    assertEquals(cal.get(Calendar.YEAR)+1, year.getYear());
  }

  /**
   * Tests that we can get the first Month instance.
   */
  public void testGetBlogForFirstMonth() {
    Month month = blog.getBlogForFirstMonth();
    assertNotNull(month);
//    assertEquals(blog.getBlogForFirstYear(), month.getYear());
    Calendar cal = blog.getCalendar();
    assertEquals(cal.get(Calendar.MONTH)+1, month.getMonth());
  }

  /**
   * Tests that we can get a Month instance.
   */
  public void testGetBlogForMonth() {
    Month month = blog.getBlogForMonth(2003, 4);
    assertNotNull(month);
    assertEquals(2003, month.getYear().getYear());
    assertEquals(4, month.getMonth());
  }

  /**
   * Tests that we can get the Month instance for this month.
   */
  public void testGetBlogForThisMonth() {
    Calendar cal = blog.getCalendar();
    Month month = blog.getBlogForThisMonth();
    assertNotNull(month);
    assertEquals(cal.get(Calendar.YEAR), month.getYear().getYear());
    assertEquals(cal.get(Calendar.MONTH) + 1, month.getMonth());
  }

  /**
   * Tests that we can get a Day instance.
   */
  public void testGetBlogForDay() {
    Day day = blog.getBlogForDay(2003, 7, 14);
    assertNotNull(day);
    assertEquals(2003, day.getMonth().getYear().getYear());
    assertEquals(7, day.getMonth().getMonth());
    assertEquals(14, day.getDay());
  }

  /**
   * Tests that we can get a Day instance.
   */
  public void testGetBlogForDate() {
    Calendar cal = blog.getCalendar();
    // Calendar.MONTH is zero-based, so 6 means July.
    cal.set(Calendar.YEAR, 2003);
    cal.set(Calendar.MONTH, 6);
    cal.set(Calendar.DAY_OF_MONTH, 14);
    Day day = blog.getBlogForDay(cal.getTime());
    assertNotNull(day);
    assertEquals(2003, day.getMonth().getYear().getYear());
    assertEquals(7, day.getMonth().getMonth());
    assertEquals(14, day.getDay());
  }

  /**
   * Tests that we can get the Day instance for today.
   */
  public void testGetBlogForToday() {
    Calendar cal = blog.getCalendar();
    Day day = blog.getBlogForToday();
    assertNotNull(day);
    assertEquals(cal.get(Calendar.YEAR), day.getMonth().getYear().getYear());
    assertEquals(cal.get(Calendar.MONTH) + 1, day.getMonth().getMonth());
    assertEquals(cal.get(Calendar.DAY_OF_MONTH), day.getDay());
  }

  /**
   * Tests that blog owners can be assigned.
   */
  public void testAssignBlogOwners() {
    blog.setProperty(Blog.BLOG_OWNERS_KEY, "user1");
    assertEquals("user1", blog.getProperty(Blog.BLOG_OWNERS_KEY));
    assertEquals("user1", blog.getBlogOwnersAsString());

    Collection users = blog.getUsersInRole(Constants.BLOG_OWNER_ROLE);
    assertEquals(1, users.size());
    assertTrue(users.contains("user1"));

    blog.setProperty(Blog.BLOG_OWNERS_KEY, "user1,user2");
    assertEquals("user1,user2", blog.getProperty(Blog.BLOG_OWNERS_KEY));
    assertEquals("user1,user2", blog.getBlogOwnersAsString());

    users = blog.getUsersInRole(Constants.BLOG_OWNER_ROLE);
    assertEquals(2, users.size());
    assertTrue(users.contains("user1"));
    assertTrue(users.contains("user2"));
  }

  /**
   * Tests that blog owners can be assigned.
   */
  public void testNullBlogOwners() {
    blog.removeProperty(Blog.BLOG_OWNERS_KEY);
    assertEquals(null, blog.getBlogOwnersAsString());

    Collection users = blog.getUsersInRole(Constants.BLOG_OWNER_ROLE);
    assertEquals(0, users.size());
  }

  /**
   * Tests that it can be determined that a user is a blog owner.
   */
  public void testUserIsBlogOwner() {
    blog.setProperty(Blog.BLOG_OWNERS_KEY, "user1");
    assertTrue(blog.isUserInRole(Constants.BLOG_OWNER_ROLE, "user1"));
    assertFalse(blog.isUserInRole(Constants.BLOG_OWNER_ROLE, "user2"));
  }

  /**
   * Tests that when no blog contributors are specified, then everybody takes
   * on that role.
   */
  public void testUserIsBlogOwnerByDefault() {
    blog.removeProperty(Blog.BLOG_OWNERS_KEY);
    assertTrue(blog.isUserInRole(Constants.BLOG_OWNER_ROLE, "user1"));
    assertTrue(blog.isUserInRole(Constants.BLOG_OWNER_ROLE, "usern"));
  }

  /**
   * Tests that blog contributors can be assigned.
   */
  public void testAssignBlogContributors() {
    blog.setProperty(Blog.BLOG_CONTRIBUTORS_KEY, "user1");
    assertEquals("user1", blog.getProperty(Blog.BLOG_CONTRIBUTORS_KEY));
    assertEquals("user1", blog.getBlogContributorsAsString());

    Collection users = blog.getUsersInRole(Constants.BLOG_CONTRIBUTOR_ROLE);
    assertEquals(1, users.size());
    assertTrue(users.contains("user1"));

    blog.setProperty(Blog.BLOG_CONTRIBUTORS_KEY, "user1,user2");
    assertEquals("user1,user2", blog.getProperty(Blog.BLOG_CONTRIBUTORS_KEY));
    assertEquals("user1,user2", blog.getBlogContributorsAsString());

    users = blog.getUsersInRole(Constants.BLOG_CONTRIBUTOR_ROLE);
    assertEquals(2, users.size());
    assertTrue(users.contains("user1"));
    assertTrue(users.contains("user2"));

    // whitespace after the comma is tolerated when parsing roles
    blog.setProperty(Blog.BLOG_CONTRIBUTORS_KEY, "user1, user2");
    assertEquals("user1, user2", blog.getProperty(Blog.BLOG_CONTRIBUTORS_KEY));
    assertEquals("user1, user2", blog.getBlogContributorsAsString());

    users = blog.getUsersInRole(Constants.BLOG_CONTRIBUTOR_ROLE);
    assertEquals(2, users.size());
    assertTrue(users.contains("user1"));
    assertTrue(users.contains("user2"));
  }

  /**
   * Tests that blog contributors can be assigned.
   */
  public void testNullBlogContributors() {
    blog.removeProperty(Blog.BLOG_CONTRIBUTORS_KEY);
    assertEquals(null, blog.getBlogContributorsAsString());

    Collection users = blog.getUsersInRole(Constants.BLOG_CONTRIBUTOR_ROLE);
    assertEquals(0, users.size());
  }

  /**
   * Tests that it can be determined that a user is a blog contributor.
   */
  public void testUserIsBlogContributor() {
    blog.setProperty(Blog.BLOG_CONTRIBUTORS_KEY, "user1");
    assertTrue(blog.isUserInRole(Constants.BLOG_CONTRIBUTOR_ROLE, "user1"));
    assertFalse(blog.isUserInRole(Constants.BLOG_CONTRIBUTOR_ROLE, "user2"));
  }

  /**
   * Tests that when no blog contributors are specified, then everybody takes
   * on that role.
   */
  public void testUserIsBlogContributorByDefault() {
    blog.removeProperty(Blog.BLOG_CONTRIBUTORS_KEY);
    assertTrue(blog.isUserInRole(Constants.BLOG_CONTRIBUTOR_ROLE, "user1"));
    assertTrue(blog.isUserInRole(Constants.BLOG_CONTRIBUTOR_ROLE, "usern"));
  }

  public void testInvalidDayOfMonthAfterTimeZoneChanges() {
    blog.getRecentBlogEntries();
    blog.setProperty(Blog.TIMEZONE_KEY, "America/New_York");

    // this should not cause an exception to be thrown
    blog.getRecentBlogEntries();
  }

  public void testGetRecentBlogEntriesFromEmptyBlog() {
    assertTrue(blog.getRecentBlogEntries(3).isEmpty());
  }

  // Verifies most-recent-first ordering and that the result is capped at the
  // requested number of entries.
  public void testGetRecentBlogEntries() throws BlogServiceException {
    BlogService service = new BlogService();

    BlogEntry entry1 = new BlogEntry(blog);
    entry1.setTitle("title1");
    entry1.setBody("body1");
    service.putBlogEntry(entry1);

    BlogEntry entry2 = new BlogEntry(blog);
    entry2.setTitle("title2");
    entry2.setBody("body2");
    service.putBlogEntry(entry2);

    BlogEntry entry3 = new BlogEntry(blog);
    entry3.setTitle("title3");
    entry3.setBody("body3");
    service.putBlogEntry(entry3);

    BlogEntry entry4 = new BlogEntry(blog);
    entry4.setTitle("title4");
    entry4.setBody("body4");
    service.putBlogEntry(entry4);

    List entries = blog.getRecentBlogEntries(3);

    assertEquals(3, entries.size());
    assertEquals(entry4, entries.get(0));
  }

  /**
   * Tests the images directory is correct and that it exists.
   */
  public void testImagesDirectoryAccessible() {
    File file = new File(blog.getRoot(), "images");
    assertEquals(file, new File(blog.getImagesDirectory()));
    assertTrue(file.exists());
  }

  /**
   * Tests the files directory is correct and that it exists.
   */
  public void testFilesDirectoryAccessible() {
    File file = new File(blog.getRoot(), "files");
    assertEquals(file, new File(blog.getFilesDirectory()));
    assertTrue(file.exists());
  }

  /**
   * Tests the theme directory is correct and that it doesn't exist by default
   * - starting up Pebble creates a theme based on the template if the theme
   * - directory doesn't exist.
   */
  public void testThemeDirectoryAccessible() {
    File file = new File(blog.getRoot(), "theme");
    assertEquals(file, new File(blog.getThemeDirectory()));
    assertTrue(file.exists());
  }

  /**
   * Tests setting a single e-mail address.
   */
  public void testSingleEmailAddress() {
    blog.setProperty(Blog.EMAIL_KEY, "me@mydomain.com");
    assertEquals("me@mydomain.com", blog.getEmail());
    assertEquals(1, blog.getEmailAddresses().size());
    assertEquals("me@mydomain.com", blog.getEmailAddresses().iterator().next());
  }

  /**
   * Tests setting multiple e-mail address.
   */
  public void testMultipleEmailAddresses() {
    blog.setProperty(Blog.EMAIL_KEY, "me@mydomain.com,you@yourdomain.com");
    assertEquals("me@mydomain.com,you@yourdomain.com", blog.getEmail());
    assertEquals(2, blog.getEmailAddresses().size());
    Iterator it = blog.getEmailAddresses().iterator();
    assertEquals("me@mydomain.com", it.next());
    assertEquals("you@yourdomain.com", it.next());
  }

  /**
   * Tests getting the first of multiple e-mail addresses.
   */
  public void testFirstEmailAddress() {
    blog.setProperty(Blog.EMAIL_KEY, "");
    assertEquals("", blog.getFirstEmailAddress());
    blog.setProperty(Blog.EMAIL_KEY, "me@mydomain.com");
    assertEquals("me@mydomain.com", blog.getFirstEmailAddress());
    blog.setProperty(Blog.EMAIL_KEY, "me@mydomain.com,you@yourdomain.com");
    assertEquals("me@mydomain.com", blog.getFirstEmailAddress());
  }

  /**
   * Tests the domain.
   */
  public void testDomain() {
    assertEquals("www.yourdomain.com", blog.getDomainName());

    PebbleContext.getInstance().getConfiguration().setUrl("http://www.yourdomain.com:8080/blog");
    assertEquals("www.yourdomain.com", blog.getDomainName());
  }

  /**
   * Tests the protocol.
   */
  public void testProtocol() {
    assertEquals("http://", blog.getProtocol());
  }

  /**
   * Tests the context.
   */
  public void testContext() {
    assertEquals("/blog/", blog.getContext());

    PebbleContext.getInstance().getConfiguration().setUrl("http://www.yourdomain.com:8080");
    assertEquals("/", blog.getContext());

    PebbleContext.getInstance().getConfiguration().setUrl("http://www.yourdomain.com:8080/");
    assertEquals("/", blog.getContext());
  }

  /**
   * Tests the logger can be accessed and is of the correct type.
   */
  public void testLogger() {
    assertNotNull(blog.getLogger());
    assertTrue(blog.getLogger() instanceof CombinedLogFormatLogger);
  }

  /**
   * Tests that listeners are fired when the blog is started.
   */
  public void testListenersFiredWhenBlogStarted() {
    // buf.reverse() inside the listener proves the callback actually ran.
    final StringBuffer buf = new StringBuffer("123");
    BlogListener listener = new BlogListener() {
      public void blogStarted(BlogEvent event) {
        assertEquals(blog, event.getSource());
        buf.reverse();
      }
      public void blogStopped(BlogEvent event) {
        fail();
      }
    };
    blog.getEventListenerList().addBlogListener(listener);
    blog.start();
    assertEquals("321", buf.toString());
    blog.getEventListenerList().removeBlogListener(listener);
    blog.stop();
  }

  /**
   * Tests that listeners are fired when the blog is stopped.
   */
  public void testListenersFiredWhenBlogStopped() {
    final StringBuffer buf = new StringBuffer("123");
    BlogListener listener = new BlogListener() {
      public void blogStarted(BlogEvent event) {
        fail();
      }
      public void blogStopped(BlogEvent event) {
        assertEquals(blog, event.getSource());
        buf.reverse();
      }
    };
    blog.getEventListenerList().addBlogListener(listener);
    blog.stop();
    assertEquals("321", buf.toString());
  }

  // A comment on an unpublished entry must not surface in the blog's recent
  // approved responses.
  public void testApprovedCommentsForUnpublishedBlogEntriesDontShowUp() throws BlogServiceException {
    BlogService service = new BlogService();

    BlogEntry blogEntry = new BlogEntry(blog);
    blogEntry.setTitle("title1");
    blogEntry.setBody("body1");
    blogEntry.setPublished(false);
    service.putBlogEntry(blogEntry);

    Comment comment = blogEntry.createComment("title", "body", "author", "email", "website", "avatar", "127.0.0.1");
    blogEntry.addComment(comment);
    service.putBlogEntry(blogEntry);

    assertFalse(blog.getRecentApprovedResponses().contains(comment));
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import static org.junit.Assert.*;

import java.io.File;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.StatisticsFactory;
import org.apache.geode.test.dunit.ThreadUtils;
import org.apache.geode.test.junit.categories.IntegrationTest;

/**
 * Testing methods for SimpleDiskRegion.java api's
 *
 * @since GemFire 5.1
 */
@Category(IntegrationTest.class)
public class SimpleDiskRegionJUnitTest extends DiskRegionTestingBase {

  /** Oplog entry ids collected concurrently by {@link TestNewDiskId} workers for a uniqueness check. */
  private final Set<Long> keyIds = Collections.synchronizedSet(new HashSet<Long>());

  private final DiskRegionProperties diskProps = new DiskRegionProperties();

  @Override
  protected final void postSetUp() throws Exception {
    diskProps.setDiskDirs(dirs);
  }

  /*
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.basicClose()'
   *
   * Verifies that closing (as opposed to destroying) each flavor of disk region releases the
   * disk-store lock file ("lk").
   */
  @Test
  public void testBasicClose() {
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowOnly due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        // message fixed: this branch creates a persist-only region
        fail(" Exception in createPersistOnly due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    // Asif: Recreate the region so that it will be destroyed
    try {
      region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    } catch (Exception e) {
      logWriter.error("Exception occurred", e);
      fail(" Exception in createPersistOnly due to " + e);
    }
  }

  /** Fails if any file in any disk dir still contains {@code substr} in its absolute path. */
  void checkIfContainsFileWithSubstring(String substr) {
    for (File dir : dirs) {
      for (File file : dir.listFiles()) {
        if (file.getAbsolutePath().contains(substr)) {
          fail("file \"" + file.getAbsolutePath() + "\" still exists");
        }
      }
    }
  }

  /** Fails unless at least one file in some disk dir contains {@code substr} in its absolute path. */
  void expectContainsFileWithSubstring(String substr) {
    for (File dir : dirs) {
      for (File file : dir.listFiles()) {
        if (file.getAbsolutePath().contains(substr)) {
          return; // found one
        }
      }
    }
    fail("did not find a file with the substring " + substr);
  }

  /** Fails if any file in any disk dir still ends with {@code fileExtension}. */
  void checkIfContainsFileWithExt(String fileExtension) {
    for (File dir : dirs) {
      for (File file : dir.listFiles()) {
        if (file.getAbsolutePath().endsWith(fileExtension)) {
          fail("file \"" + file.getAbsolutePath() + "\" still exists");
        }
      }
    }
  }

  /*
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.basicDestroy()'
   *
   * Verifies that destroying each flavor of disk region removes the lock file, the oplog files
   * ("crf"/"drf") and any overflow files.
   */
  @Test
  public void testBasicDestroy() {
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowOnly due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        // message fixed: this branch creates a persist-only region
        fail(" Exception in createPersistOnly due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
  }

  // /*
  // * Test method for
  // * 'org.apache.geode.internal.cache.SimpleDiskRegion.basicInitializeOwner()'
  // */
  // @Test
  // public void testBasicInitializeOwner()
  // {
  // deleteFiles();
  // region = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, diskProps);
  // DiskRegion dr = ((LocalRegion)region).getDiskRegion();
  // put100Int();
  // assertIndexDetailsEquals(new Integer(1), region.get(new Integer(1)));
  // Oplog oplog = dr.getChild();
  // int id = oplog.getOplogId();
  // StatisticsFactory factory = dr.getOwner().getCache().getDistributedSystem();
  // Oplog newOplog = new Oplog(id + 1, dr.getDiskStore(), new DirectoryHolder(factory,
  // dirs[0], 1000000, 0));
  // dr.setChild(newOplog);
  // region.clear();
  // newOplog = dr.getChild();
  // assertIndexDetailsEquals(null, region.get(new Integer(1)));
  // try {
  // dr.addToOplogSet(id, new File(oplog.getOplogFileForTest()
  // .getPath()), dr.getNextDir());
  // }
  // catch (Exception e) {
  // logWriter
  // .error(
  // "Exception in synching data present in the buffers of RandomAccessFile of Oplog, to the disk",
  // e);
  // fail("Test failed because synching of data present in buffer of RandomAccesFile ");
  // }
  // oplog.close();
  // dr.setIsRecovering(true);
  // dr.basicInitializeOwner();
  // assertIndexDetailsEquals(new Integer(1), region.get(new Integer(1)));
  // closeDown();
  // }

  /*
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.getChild()'
   *
   * Swaps the disk region's current child oplog and verifies the test hook observes the swap in
   * both directions.
   */
  @Test
  public void testGetChild() {
    deleteFiles();
    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    DiskRegion dr = ((LocalRegion) region).getDiskRegion();
    Oplog oplog = dr.testHook_getChild();
    long id = oplog.getOplogId();
    StatisticsFactory factory = region.getCache().getDistributedSystem();
    Oplog newOplog =
        new Oplog(id, dr.getOplogSet(), new DirectoryHolder(factory, dirs[0], 1000000, 0));
    dr.getDiskStore().getPersistentOplogs().setChild(newOplog);
    assertEquals(newOplog, dr.testHook_getChild());
    dr.setChild(oplog);
    assertEquals(oplog, dr.testHook_getChild());
    newOplog.close();
    newOplog = null;
    closeDown();
  }

  /*
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.getNextDir()'
   *
   * Verifies getNextDir() cycles round-robin over the configured disk dirs, starting after the
   * dir used at creation time.
   */
  @Test
  public void testGetNextDir() {
    deleteFiles();
    File file1 = new File("SimpleDiskRegionJUnitTestDir1");
    file1.mkdir();
    file1.deleteOnExit();
    File file2 = new File("SimpleDiskRegionJUnitTestDir2");
    file2.mkdir();
    file2.deleteOnExit();
    File file3 = new File("SimpleDiskRegionJUnitTestDir3");
    file3.mkdir();
    file3.deleteOnExit();
    File file4 = new File("SimpleDiskRegionJUnitTestDir4");
    file4.mkdir();
    file4.deleteOnExit();
    // remember the shared dirs so they can be restored after this test rewires them
    File[] oldDirs = dirs;
    dirs[0] = file1;
    dirs[1] = file2;
    dirs[2] = file3;
    dirs[3] = file4;
    closeDiskStores();
    deleteFiles();
    DiskRegionProperties diskProps = new DiskRegionProperties();
    diskProps.setDiskDirs(dirs);
    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    DiskRegion dr = ((LocalRegion) region).getDiskRegion();
    assertEquals(file2, dr.getNextDir().getDir());
    assertEquals(file3, dr.getNextDir().getDir());
    assertEquals(file4, dr.getNextDir().getDir());
    assertEquals(file1, dr.getNextDir().getDir());
    closeDown();
    deleteFiles();
    dirs = oldDirs;
  }

  /*
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.newDiskId()'
   *
   * Five threads each draw 10000 oplog entry ids concurrently; all 50000 ids must be unique.
   */
  @Test
  public void testNewDiskId() {
    deleteFiles();
    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    TestNewDiskId newDiskId = new TestNewDiskId();
    Thread thread1 = new Thread(newDiskId);
    Thread thread2 = new Thread(newDiskId);
    Thread thread3 = new Thread(newDiskId);
    Thread thread4 = new Thread(newDiskId);
    Thread thread5 = new Thread(newDiskId);
    thread1.setDaemon(true);
    thread2.setDaemon(true);
    thread3.setDaemon(true);
    thread4.setDaemon(true);
    thread5.setDaemon(true);
    thread1.start();
    thread2.start();
    thread3.start();
    thread4.start();
    thread5.start();
    ThreadUtils.join(thread1, 30 * 1000);
    ThreadUtils.join(thread2, 30 * 1000);
    ThreadUtils.join(thread3, 30 * 1000);
    ThreadUtils.join(thread4, 30 * 1000);
    ThreadUtils.join(thread5, 30 * 1000);
    // 5 threads x 10000 ids each; a duplicate id would shrink the set below 50000.
    // (Previous failure message incorrectly said 5000.)
    assertEquals("expected 50000 unique oplog entry ids but got " + keyIds.size(), 50000,
        keyIds.size());
    closeDown();
  }

  /** Worker that draws 10000 oplog entry ids and records them in the shared {@link #keyIds} set. */
  class TestNewDiskId implements Runnable {
    public void run() {
      for (int i = 0; i < 10000; i++) {
        long keyId = ((LocalRegion) region).getDiskRegion().newOplogEntryId();
        keyIds.add(Long.valueOf(keyId));
      }
    }
  }
}
/**
 */
package CIM.IEC61970.Meas.impl;

import CIM.IEC61970.Meas.Command;
import CIM.IEC61970.Meas.Discrete;
import CIM.IEC61970.Meas.MeasPackage;
import CIM.IEC61970.Meas.ValueAliasSet;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Command</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link CIM.IEC61970.Meas.impl.CommandImpl#getNormalValue <em>Normal Value</em>}</li>
 *   <li>{@link CIM.IEC61970.Meas.impl.CommandImpl#getValueAliasSet <em>Value Alias Set</em>}</li>
 *   <li>{@link CIM.IEC61970.Meas.impl.CommandImpl#getValue <em>Value</em>}</li>
 *   <li>{@link CIM.IEC61970.Meas.impl.CommandImpl#getDiscrete <em>Discrete</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE: EMF-generated class (all members carry @generated). Hand edits will be discarded on
// regeneration unless the corresponding @generated tag is removed.
public class CommandImpl extends ControlImpl implements Command {
	/**
	 * The default value of the '{@link #getNormalValue() <em>Normal Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getNormalValue()
	 * @generated
	 * @ordered
	 */
	protected static final int NORMAL_VALUE_EDEFAULT = 0;

	/**
	 * The cached value of the '{@link #getNormalValue() <em>Normal Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getNormalValue()
	 * @generated
	 * @ordered
	 */
	protected int normalValue = NORMAL_VALUE_EDEFAULT;

	/**
	 * The cached value of the '{@link #getValueAliasSet() <em>Value Alias Set</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValueAliasSet()
	 * @generated
	 * @ordered
	 */
	protected ValueAliasSet valueAliasSet;

	/**
	 * The default value of the '{@link #getValue() <em>Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue()
	 * @generated
	 * @ordered
	 */
	protected static final int VALUE_EDEFAULT = 0;

	/**
	 * The cached value of the '{@link #getValue() <em>Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue()
	 * @generated
	 * @ordered
	 */
	protected int value = VALUE_EDEFAULT;

	/**
	 * The cached value of the '{@link #getDiscrete() <em>Discrete</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getDiscrete()
	 * @generated
	 * @ordered
	 */
	protected Discrete discrete;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected CommandImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return MeasPackage.Literals.COMMAND;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public int getNormalValue() {
		return normalValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setNormalValue(int newNormalValue) {
		int oldNormalValue = normalValue;
		normalValue = newNormalValue;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__NORMAL_VALUE, oldNormalValue, normalValue));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Resolves the reference lazily if it is still an EMF proxy.
	public ValueAliasSet getValueAliasSet() {
		if (valueAliasSet != null && valueAliasSet.eIsProxy()) {
			InternalEObject oldValueAliasSet = (InternalEObject)valueAliasSet;
			valueAliasSet = (ValueAliasSet)eResolveProxy(oldValueAliasSet);
			if (valueAliasSet != oldValueAliasSet) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MeasPackage.COMMAND__VALUE_ALIAS_SET, oldValueAliasSet, valueAliasSet));
			}
		}
		return valueAliasSet;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ValueAliasSet basicGetValueAliasSet() {
		return valueAliasSet;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetValueAliasSet(ValueAliasSet newValueAliasSet, NotificationChain msgs) {
		ValueAliasSet oldValueAliasSet = valueAliasSet;
		valueAliasSet = newValueAliasSet;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__VALUE_ALIAS_SET, oldValueAliasSet, newValueAliasSet);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Maintains the bidirectional VALUE_ALIAS_SET__COMMANDS opposite reference.
	public void setValueAliasSet(ValueAliasSet newValueAliasSet) {
		if (newValueAliasSet != valueAliasSet) {
			NotificationChain msgs = null;
			if (valueAliasSet != null)
				msgs = ((InternalEObject)valueAliasSet).eInverseRemove(this, MeasPackage.VALUE_ALIAS_SET__COMMANDS, ValueAliasSet.class, msgs);
			if (newValueAliasSet != null)
				msgs = ((InternalEObject)newValueAliasSet).eInverseAdd(this, MeasPackage.VALUE_ALIAS_SET__COMMANDS, ValueAliasSet.class, msgs);
			msgs = basicSetValueAliasSet(newValueAliasSet, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__VALUE_ALIAS_SET, newValueAliasSet, newValueAliasSet));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public int getValue() {
		return value;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue(int newValue) {
		int oldValue = value;
		value = newValue;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__VALUE, oldValue, value));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Resolves the reference lazily if it is still an EMF proxy.
	public Discrete getDiscrete() {
		if (discrete != null && discrete.eIsProxy()) {
			InternalEObject oldDiscrete = (InternalEObject)discrete;
			discrete = (Discrete)eResolveProxy(oldDiscrete);
			if (discrete != oldDiscrete) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MeasPackage.COMMAND__DISCRETE, oldDiscrete, discrete));
			}
		}
		return discrete;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Discrete basicGetDiscrete() {
		return discrete;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetDiscrete(Discrete newDiscrete, NotificationChain msgs) {
		Discrete oldDiscrete = discrete;
		discrete = newDiscrete;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__DISCRETE, oldDiscrete, newDiscrete);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Maintains the bidirectional DISCRETE__COMMAND opposite reference.
	public void setDiscrete(Discrete newDiscrete) {
		if (newDiscrete != discrete) {
			NotificationChain msgs = null;
			if (discrete != null)
				msgs = ((InternalEObject)discrete).eInverseRemove(this, MeasPackage.DISCRETE__COMMAND, Discrete.class, msgs);
			if (newDiscrete != null)
				msgs = ((InternalEObject)newDiscrete).eInverseAdd(this, MeasPackage.DISCRETE__COMMAND, Discrete.class, msgs);
			msgs = basicSetDiscrete(newDiscrete, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.COMMAND__DISCRETE, newDiscrete, newDiscrete));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				if (valueAliasSet != null)
					msgs = ((InternalEObject)valueAliasSet).eInverseRemove(this, MeasPackage.VALUE_ALIAS_SET__COMMANDS, ValueAliasSet.class, msgs);
				return basicSetValueAliasSet((ValueAliasSet)otherEnd, msgs);
			case MeasPackage.COMMAND__DISCRETE:
				if (discrete != null)
					msgs = ((InternalEObject)discrete).eInverseRemove(this, MeasPackage.DISCRETE__COMMAND, Discrete.class, msgs);
				return basicSetDiscrete((Discrete)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				return basicSetValueAliasSet(null, msgs);
			case MeasPackage.COMMAND__DISCRETE:
				return basicSetDiscrete(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case MeasPackage.COMMAND__NORMAL_VALUE:
				return getNormalValue();
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				if (resolve) return getValueAliasSet();
				return basicGetValueAliasSet();
			case MeasPackage.COMMAND__VALUE:
				return getValue();
			case MeasPackage.COMMAND__DISCRETE:
				if (resolve) return getDiscrete();
				return basicGetDiscrete();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case MeasPackage.COMMAND__NORMAL_VALUE:
				setNormalValue((Integer)newValue);
				return;
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				setValueAliasSet((ValueAliasSet)newValue);
				return;
			case MeasPackage.COMMAND__VALUE:
				setValue((Integer)newValue);
				return;
			case MeasPackage.COMMAND__DISCRETE:
				setDiscrete((Discrete)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case MeasPackage.COMMAND__NORMAL_VALUE:
				setNormalValue(NORMAL_VALUE_EDEFAULT);
				return;
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				setValueAliasSet((ValueAliasSet)null);
				return;
			case MeasPackage.COMMAND__VALUE:
				setValue(VALUE_EDEFAULT);
				return;
			case MeasPackage.COMMAND__DISCRETE:
				setDiscrete((Discrete)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case MeasPackage.COMMAND__NORMAL_VALUE:
				return normalValue != NORMAL_VALUE_EDEFAULT;
			case MeasPackage.COMMAND__VALUE_ALIAS_SET:
				return valueAliasSet != null;
			case MeasPackage.COMMAND__VALUE:
				return value != VALUE_EDEFAULT;
			case MeasPackage.COMMAND__DISCRETE:
				return discrete != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (normalValue: ");
		result.append(normalValue);
		result.append(", value: ");
		result.append(value);
		result.append(')');
		return result.toString();
	}

} //CommandImpl
/*
 * Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.nio;

import com.hazelcast.logging.ILogger;

import javax.crypto.Cipher;
import java.nio.ByteBuffer;
import java.util.logging.Level;

import static com.hazelcast.nio.IOUtil.copyToDirectBuffer;

/**
 * Writes {@link Packet}s into a socket's ByteBuffer, delegating to one of three strategies chosen
 * at construction time from the IOService configuration: plain (no encryption), symmetric cipher,
 * or asymmetric cipher. Enabling both encryption modes at once is a configuration error and aborts
 * construction.
 */
public class SocketPacketWriter implements SocketWriter<Packet> {

    private final PacketWriter packetWriter;

    final Connection connection;
    final ILogger logger;

    SocketPacketWriter(Connection connection) {
        this.connection = connection;
        final IOService ioService = connection.getConnectionManager().ioService;
        this.logger = ioService.getLogger(SocketPacketWriter.class.getName());
        boolean symmetricEncryptionEnabled = CipherHelper.isSymmetricEncryptionEnabled(ioService);
        boolean asymmetricEncryptionEnabled = CipherHelper.isAsymmetricEncryptionEnabled(ioService);
        if (asymmetricEncryptionEnabled || symmetricEncryptionEnabled) {
            if (asymmetricEncryptionEnabled && symmetricEncryptionEnabled) {
                // both modes enabled is an invalid configuration: fail fast
                logger.log(Level.INFO, "Incorrect encryption configuration.");
                logger.log(Level.INFO, "You can enable either SymmetricEncryption or AsymmetricEncryption.");
                throw new RuntimeException();
            } else if (symmetricEncryptionEnabled) {
                packetWriter = new SymmetricCipherPacketWriter();
                logger.log(Level.INFO, "Writer started with SymmetricEncryption");
            } else {
                packetWriter = new AsymmetricCipherPacketWriter();
                logger.log(Level.INFO, "Writer started with AsymmetricEncryption");
            }
        } else {
            packetWriter = new DefaultPacketWriter();
        }
    }

    /**
     * Writes (part of) the packet into {@code socketBuffer}.
     *
     * @return true when the whole packet has been written; false if the caller must invoke this
     *         again with more buffer space (partial-write state is kept in the strategy).
     */
    public boolean write(Packet socketWritable, ByteBuffer socketBuffer) throws Exception {
        return packetWriter.writePacket(socketWritable, socketBuffer);
    }

    /** Strategy interface: write as much of {@code packet} as fits; return true when complete. */
    interface PacketWriter {
        boolean writePacket(Packet packet, ByteBuffer socketBB) throws Exception;
    }

    /** Unencrypted pass-through: lets the packet serialize itself into the socket buffer. */
    class DefaultPacketWriter implements PacketWriter {
        public boolean writePacket(Packet packet, ByteBuffer socketBB) {
            return packet.writeToSocketBuffer(socketBB);
        }
    }

    /**
     * Encrypts packet bytes with an asymmetric cipher before copying them to the socket buffer.
     * The first packet on a connection is preceded by the local key alias (length-prefixed bytes).
     * Encrypted output is staged in {@code cipherBuffer} so partially-drained ciphertext survives
     * between calls.
     */
    class AsymmetricCipherPacketWriter implements PacketWriter {
        // NOTE(review): SEND_SOCKET_BUFFER_SIZE is not declared in this file — presumably a
        // constant inherited or statically imported elsewhere in com.hazelcast.nio; confirm.
        final ByteBuffer cipherBuffer = ByteBuffer.allocate(2 * SEND_SOCKET_BUFFER_SIZE);
        final Cipher cipher;
        final int writeBlockSize;

        boolean aliasWritten = false;

        AsymmetricCipherPacketWriter() {
            Cipher c = null;
            try {
                c = CipherHelper.createAsymmetricWriterCipher(connection.getConnectionManager().ioService);
            } catch (Exception e) {
                logger.log(Level.SEVERE, "Asymmetric Cipher for WriteHandler cannot be initialized.", e);
                // leave the writer in a disabled state; handleCipherException decides the fallout
                cipher = null;
                writeBlockSize = 0;
                CipherHelper.handleCipherException(e, connection);
                return;
            }
            cipher = c;
            writeBlockSize = cipher.getBlockSize();
        }

        public boolean writePacket(Packet packet, ByteBuffer socketBB) throws Exception {
            if (!aliasWritten) {
                // handshake: tell the peer which key alias to use (int length + raw bytes)
                String localAlias = CipherHelper.getKeyAlias(connection.getConnectionManager().ioService);
                byte[] localAliasBytes = localAlias.getBytes();
                socketBB.putInt(localAliasBytes.length);
                socketBB.put(localAliasBytes);
                aliasWritten = true;
            }
            boolean complete = encryptAndWrite(packet, socketBB);
            if (complete) {
                // reset so the alias is re-sent before the next packet
                aliasWritten = false;
            }
            return complete;
        }

        /**
         * Drains any pending ciphertext, then encrypts and writes the packet's sections
         * (sizes, header, key, value) in order. Returns true once all packet bytes are written.
         */
        public final boolean encryptAndWrite(Packet packet, ByteBuffer socketBB) throws Exception {
            if (cipherBuffer.position() > 0 && socketBB.hasRemaining()) {
                // flush ciphertext left over from a previous partial write
                cipherBuffer.flip();
                copyToDirectBuffer(cipherBuffer, socketBB);
                if (cipherBuffer.hasRemaining()) {
                    cipherBuffer.compact();
                } else {
                    cipherBuffer.clear();
                }
            }
            packet.totalWritten += encryptAndWriteToSocket(packet.bbSizes, socketBB);
            packet.totalWritten += encryptAndWriteToSocket(packet.bbHeader, socketBB);
            if (packet.getKey() != null && packet.getKey().size() > 0 && socketBB.hasRemaining()) {
                packet.totalWritten += encryptAndWriteToSocket(packet.getKey().buffer, socketBB);
            }
            if (packet.getValue() != null && packet.getValue().size() > 0 && socketBB.hasRemaining()) {
                packet.totalWritten += encryptAndWriteToSocket(packet.getValue().buffer, socketBB);
            }
            return packet.totalWritten >= packet.totalSize;
        }

        /** Encrypts {@code src} into cipherBuffer, drains to the socket; returns bytes consumed from src. */
        private int encryptAndWriteToSocket(ByteBuffer src, ByteBuffer socketBB) throws Exception {
            int remaining = src.remaining();
            if (src.hasRemaining()) {
                doCipherUpdate(src);
                cipherBuffer.flip();
                copyToDirectBuffer(cipherBuffer, socketBB);
                if (cipherBuffer.hasRemaining()) {
                    cipherBuffer.compact();
                } else {
                    cipherBuffer.clear();
                }
                return remaining - src.remaining();
            }
            return 0;
        }

        /**
         * Encrypts src in chunks of at most {@code writeBlockSize} bytes, each via doFinal
         * (one complete asymmetric operation per chunk).
         */
        private void doCipherUpdate(ByteBuffer src) throws Exception {
            while (src.hasRemaining()) {
                int remaining = src.remaining();
                if (remaining > writeBlockSize) {
                    // temporarily cap the limit so doFinal sees exactly one block
                    int oldLimit = src.limit();
                    src.limit(src.position() + writeBlockSize);
                    // NOTE(review): outputAppendSize is computed but never used here and below
                    int outputAppendSize = cipher.doFinal(src, cipherBuffer);
                    src.limit(oldLimit);
                } else {
                    int outputAppendSize = cipher.doFinal(src, cipherBuffer);
                }
            }
        }
    }

    /**
     * Encrypts packet bytes with a symmetric cipher. Each packet is prefixed with the ciphertext
     * size (an int) so the reader knows how much to decrypt; the stream is finalized with
     * {@code cipher.doFinal()} once all plaintext has been fed through.
     */
    class SymmetricCipherPacketWriter implements PacketWriter {
        boolean sizeWritten = false;
        final ByteBuffer cipherBuffer = ByteBuffer.allocate(SEND_SOCKET_BUFFER_SIZE);
        final Cipher cipher;

        SymmetricCipherPacketWriter() {
            Cipher c = null;
            try {
                c = CipherHelper.createSymmetricWriterCipher(connection.getConnectionManager().ioService);
            } catch (Exception e) {
                logger.log(Level.SEVERE, "Symmetric Cipher for WriteHandler cannot be initialized.", e);
                CipherHelper.handleCipherException(e, connection);
            }
            // NOTE(review): on failure cipher is assigned null (no early return as in the
            // asymmetric ctor); later writePacket calls would then NPE — confirm intended.
            cipher = c;
        }

        public boolean writePacket(Packet packet, ByteBuffer socketBB) throws Exception {
            if (cipherBuffer.position() > 0 && socketBB.hasRemaining()) {
                // flush ciphertext left over from a previous partial write
                cipherBuffer.flip();
                copyToDirectBuffer(cipherBuffer, socketBB);
                if (cipherBuffer.hasRemaining()) {
                    cipherBuffer.compact();
                } else {
                    cipherBuffer.clear();
                }
            }
            if (!sizeWritten) {
                // prefix the packet with the total ciphertext size so the reader can frame it
                int cipherSize = cipher.getOutputSize(packet.totalSize);
                socketBB.putInt(cipherSize);
                sizeWritten = true;
            }
            packet.totalWritten += encryptAndWriteToSocket(packet.bbSizes, socketBB);
            packet.totalWritten += encryptAndWriteToSocket(packet.bbHeader, socketBB);
            if (packet.getKey() != null && packet.getKey().size() > 0 && socketBB.hasRemaining()) {
                packet.totalWritten += encryptAndWriteToSocket(packet.getKey().buffer, socketBB);
            }
            if (packet.getValue() != null && packet.getValue().size() > 0 && socketBB.hasRemaining()) {
                packet.totalWritten += encryptAndWriteToSocket(packet.getValue().buffer, socketBB);
            }
            boolean complete = packet.totalWritten >= packet.totalSize;
            if (complete) {
                if (socketBB.remaining() >= cipher.getOutputSize(0)) {
                    sizeWritten = false;
                    // final cipher block (padding) closes out this packet's ciphertext
                    socketBB.put(cipher.doFinal());
                } else {
                    // not enough room for the final block; retry on the next call
                    return false;
                }
            }
            return complete;
        }

        /**
         * Feeds as much of {@code src} through cipher.update as cipherBuffer can hold, then drains
         * to the socket; returns bytes consumed from src (0 if nothing could be processed).
         */
        private int encryptAndWriteToSocket(ByteBuffer src, ByteBuffer socketBB) throws Exception {
            int remaining = src.remaining();
            if (src.hasRemaining() && cipherBuffer.hasRemaining()) {
                int outputSize = cipher.getOutputSize(src.remaining());
                if (outputSize <= cipherBuffer.remaining()) {
                    cipher.update(src, cipherBuffer);
                } else {
                    // output would overflow: encrypt only half of the smaller remaining span so
                    // the (possibly padded) ciphertext still fits in cipherBuffer
                    int min = Math.min(src.remaining(), cipherBuffer.remaining());
                    int len = min / 2;
                    if (len > 0) {
                        int limitOld = src.limit();
                        src.limit(src.position() + len);
                        cipher.update(src, cipherBuffer);
                        src.limit(limitOld);
                    } else {
                        return 0;
                    }
                }
                cipherBuffer.flip();
                copyToDirectBuffer(cipherBuffer, socketBB);
                if (cipherBuffer.hasRemaining()) {
                    cipherBuffer.compact();
                } else {
                    cipherBuffer.clear();
                }
                return remaining - src.remaining();
            }
            return 0;
        }
    }
}
/**
 * Copyright 2007-2013 South-East European Research Centre (SEERC),
 * The University of Sheffield (http://www.seerc.org)
 *
 * Developed by Dimitrios Kourtesis (dkourtesis@seerc.org; d.kourtesis@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * RemoveRFPFromIndexResponse_type0.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.3 Built on : Aug 10, 2007 (04:45:58 LKT)
 *
 * NOTE(review): generated ADB (Axis2 Data Binding) bean. Do not hand-edit the
 * serialization logic — regenerate from the WSDL instead; the statement order
 * here defines the wire format.
 */
package org.seerc.fusion.sr.api.xsd;

/**
 * RemoveRFPFromIndexResponse_type0 bean class.
 *
 * Represents the <code>removeRFPFromIndexResponse_type0</code> XML type as a
 * simple list of strings ({@link #localString}). The value is serialized as a
 * single space-separated text node (see {@link #toString()}), and parsed back
 * by splitting on runs of spaces (see {@link Factory#fromString}).
 */
public class RemoveRFPFromIndexResponse_type0
        implements org.apache.axis2.databinding.ADBBean {

    /** Qualified name of the element this bean (de)serializes. */
    public static final javax.xml.namespace.QName MY_QNAME =
            new javax.xml.namespace.QName("http://api.sr.fusion.seerc.org/xsd",
                    "removeRFPFromIndexResponse_type0", "ns1");

    // Backing storage for the "string" list content of this element.
    protected java.lang.String[] localString;

    /**
     * Returns the canonical prefix for the service namespace, or a fresh
     * unique prefix for any other namespace.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://api.sr.fusion.seerc.org/xsd")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /** Returns the list of string values held by this bean (may be null). */
    public java.lang.String[] getString() {
        return localString;
    }

    /** Replaces the list of string values held by this bean. */
    public void setString(java.lang.String[] itemList) {
        this.localString = itemList;
    }

    /**
     * Serializes the value list as a single space-separated string — this is
     * exactly the text-node form written by {@link #serialize}.
     * NOTE(review): throws NullPointerException if any element is null.
     */
    public java.lang.String toString() {
        java.lang.StringBuffer outString = new java.lang.StringBuffer();

        if (localString != null) {
            for (int i = 0; i < localString.length; i++) {
                outString.append(localString[i].toString()).append(" ");
            }
        }

        return outString.toString().trim();
    }

    /**
     * isReaderMTOMAware
     *
     * @return true if the reader supports MTOM (i.e. exposes the
     *         IS_DATA_HANDLERS_AWARE property); false if the property is
     *         absent or the reader rejects the property name
     */
    public static boolean isReaderMTOMAware(
            javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;

        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(
                    org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not recognize the property — treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }

        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement: serialization is
     * deferred until the returned element is actually consumed.
     *
     * @param parentQName ignored here; the element is always emitted as MY_QNAME
     * @param factory     the Axiom factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory)
            throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, MY_QNAME) {
                    public void serialize(
                            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                            throws javax.xml.stream.XMLStreamException {
                        RemoveRFPFromIndexResponse_type0.this.serialize(MY_QNAME,
                                factory, xmlWriter);
                    }
                };

        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(MY_QNAME,
                factory, dataSource);
    }

    /**
     * Writes this bean to the stream as
     * <code>&lt;parentQName&gt;space-separated values&lt;/parentQName&gt;</code>,
     * binding a namespace prefix for the parent element if necessary.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException,
                org.apache.axis2.databinding.ADBException {
        // first write the start element
        java.lang.String namespace = parentQName.getNamespaceURI();
        java.lang.String localName = parentQName.getLocalPart();

        if (!namespace.equals("")) {
            java.lang.String prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                // Namespace not yet bound: invent a prefix and declare it on
                // this start element.
                prefix = generatePrefix(namespace);

                xmlWriter.writeStartElement(prefix, localName, namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, localName);
            }
        } else {
            xmlWriter.writeStartElement(localName);
        }

        // Element content is the flattened value list.
        xmlWriter.writeCharacters(RemoveRFPFromIndexResponse_type0.this.toString());
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,
            java.lang.String namespace, java.lang.String attName,
            java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,
            java.lang.String attName, java.lang.String attValue,
            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, registering a prefix for
     * the QName's namespace so the value can be written as "prefix:localPart".
     */
    private void writeQNameAttribute(java.lang.String namespace,
            java.lang.String attName, javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);

        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }

        java.lang.String attributeValue;

        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames: writes a QName as character data, declaring the
     * namespace prefix first where needed.
     */
    private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();

        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);

            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" +
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                    qname));
        }
    }

    /**
     * Writes an array of QNames as one space-separated text node. Prefix
     * declarations are emitted up front because namespace data cannot be
     * written once character data has started.
     */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }

                namespaceURI = qnames[i].getNamespaceURI();

                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);

                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":")
                                     .append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                            qnames[i]));
                }
            }

            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix: returns the prefix already bound to the
     * namespace, or binds a new, unused one and returns it.
     */
    private java.lang.String registerPrefix(
            javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);

        if (prefix == null) {
            prefix = generatePrefix(namespace);

            // Keep generating until the prefix is not bound to some other URI.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }

            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object:
     * a pull reader over a single text event holding the flattened value list.
     */
    public javax.xml.stream.XMLStreamReader getPullParser(
            javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(MY_QNAME,
            new java.lang.Object[] {
                org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT,
                toString()
            }, null);
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {
        /**
         * Builds a bean from the element's text content by splitting on runs
         * of spaces — the inverse of {@link RemoveRFPFromIndexResponse_type0#toString()}.
         * NOTE(review): generated code drops the original exception — the
         * thrown ADBException carries neither message nor cause.
         */
        public static RemoveRFPFromIndexResponse_type0 fromString(
                javax.xml.stream.XMLStreamReader xmlStreamReader,
                java.lang.String content)
                throws org.apache.axis2.databinding.ADBException {
            RemoveRFPFromIndexResponse_type0 object = new RemoveRFPFromIndexResponse_type0();

            java.lang.String[] values = content.split(" +");
            java.lang.String[] objectValues = new java.lang.String[values.length];

            try {
                for (int i = 0; i < values.length; i++) {
                    objectValues[i] = org.apache.axis2.databinding.utils.ConverterUtil.convertToString(values[i]);
                }

                object.setString(objectValues);

                return object;
            } catch (java.lang.Exception e) {
                throw new org.apache.axis2.databinding.ADBException();
            }
        }

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next
         * start element starts this object and any intervening reader events
         * are ignorable.
         * If this object is not an element, it is a complex type and the
         * reader is at the event just after the outer start element.
         * Postcondition: If this object is an element, the reader is
         * positioned at its end element.
         * If this object is a complex type, the reader is positioned at the
         * end element of its outer element.
         *
         * NOTE(review): event, nillableValue, namespaceuri, prefix and
         * handledAttributes are unused — artifacts of the Axis2 code generator
         * kept for fidelity with the generated template.
         */
        public static RemoveRFPFromIndexResponse_type0 parse(
                javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            RemoveRFPFromIndexResponse_type0 object = new RemoveRFPFromIndexResponse_type0();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";

            try {
                // Advance to the first start/end element event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Consume the element's text and rebuild the bean from it.
                java.lang.String content = reader.getElementText();

                object = RemoveRFPFromIndexResponse_type0.Factory.fromString(reader,
                        content);

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }
    } //end of factory class
}
package io.happie.cordovaCamera;

import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.text.TextUtils;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;

import main.java.com.mindscapehq.android.raygun4android.RaygunClient;

/**
 * Cordova plugin entry point for the Happie camera feature.
 *
 * Dispatches JavaScript actions: photo-metadata read/write (JSON files under
 * {@code <filesDir>/media/<user>/<jnid>}), thumbnail generation, a processing
 * counter query, and — as the fall-through action — opening the native camera
 * activity (requesting the CAMERA runtime permission first if needed).
 *
 * Errors are reported to Raygun; most file operations are deliberately
 * best-effort so a single bad file does not fail the whole action.
 */
public class HappieCamera extends CordovaPlugin {

    // Callback of the most recent execute() call; completed later by
    // sessionFinished() once the camera activity is done.
    private static CallbackContext staticCallbackContext;
    static File filesDir;
    private Context appContext;
    private static String currentAction = "";

    private static final String CAMERA = Manifest.permission.CAMERA;
    private static final int CAM_REQUEST_CODE = 0;

    // Session parameters handed to the camera activity (read elsewhere in the
    // plugin — keep names/visibility stable).
    public static Integer quality;
    static String userId = "nouser";
    static String jnId = "noid";

    /**
     * Dispatches an action invoked from JavaScript.
     *
     * @param action          one of getProcessingCount / writePhotoMeta /
     *                        readPhotoMeta / generateThumbnail; any other
     *                        value is treated as "open the camera"
     * @param args            action-specific arguments (see each branch)
     * @param callbackContext Cordova callback to report success/failure on
     * @return true if the action was recognized and handled
     * @throws JSONException if a required argument is missing or malformed
     */
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext)
            throws JSONException {
        staticCallbackContext = callbackContext;
        currentAction = action;
        filesDir = this.cordova.getActivity().getApplicationContext().getFilesDir();
        appContext = this.cordova.getActivity().getApplicationContext();

        if (action.equals("getProcessingCount")) {
            String user = args.getString(0);
            String jnid = args.getString(1);
            callbackContext.success("{\"count\":" + HappieCameraJSON.GET_ACTIVE_PROCESSES()
                    + ", \"total\":" + HappieCameraJSON.GET_TOTAL_IMAGES(user, jnid) + "}");
            return true;
        } else if (action.equals("writePhotoMeta")) {
            writePhotoMeta(args.getString(0), args.getString(1), args.getJSONArray(2));
            callbackContext.success("finished writing json");
            return true;
        } else if (action.equals("readPhotoMeta")) {
            callbackContext.success(readPhotoMeta(args.getString(0), args.getString(1)));
            return true;
        } else if (action.equals("generateThumbnail")) {
            try {
                generateThumbnail(args);
                callbackContext.success("called build thumbnail");
                return true;
            } catch (java.io.IOException e) {
                return false;
            }
        }
        // handle camera open as last known action
        else if (cordova.hasPermission(CAMERA)) {
            quality = args.getInt(0);
            userId = args.getString(1);
            jnId = args.getString(2);
            return willOpenCamera();
        } else {
            quality = args.getInt(0);
            userId = args.getString(1);
            jnId = args.getString(2);
            getCamPermission(CAM_REQUEST_CODE);
            return true;
        }
    }

    /**
     * Writes each {id, data} item of the array as a UTF-8 JSON file named
     * {@code <id>} under {@code <filesDir>/media/<user>/<jnid>}.
     * Failures on individual items are reported to Raygun and skipped.
     */
    private void writePhotoMeta(String user, String jnid, JSONArray array)
            throws JSONException {
        String filePath = appContext.getFilesDir() + "/media" + "/" + user + "/" + jnid;
        File sessionDir = new File(filePath);
        if (!sessionDir.exists()) {
            // Robustness fix: the session directory may not exist yet; the
            // original code let FileOutputStream fail on every item.
            sessionDir.mkdirs();
        }

        for (int i = 0; i < array.length(); i++) {
            JSONObject item = array.getJSONObject(i);
            String fileName = item.getString("id");
            String json = item.getString("data");

            FileOutputStream fos = null;
            try {
                fos = new FileOutputStream(new File(filePath, fileName));
                fos.write(json.getBytes("UTF-8"));
            } catch (Exception e) {
                RaygunClient.send(e);
            } finally {
                // Fix: guard against fos == null (constructor threw) — the old
                // unconditional close() raised a spurious NPE into Raygun.
                if (fos != null) {
                    try {
                        fos.close();
                    } catch (Exception ex) {
                        RaygunClient.send(ex);
                    }
                }
            }
        }
    }

    /**
     * Reads every *.json file in {@code <filesDir>/media/<user>/<jnid>} and
     * returns their contents joined as a JSON array string. Unreadable files
     * are reported to Raygun and skipped.
     */
    private String readPhotoMeta(String user, String jnid) {
        String filePath = appContext.getFilesDir() + "/media" + "/" + user + "/" + jnid;
        File sessionDir = new File(filePath);
        final ArrayList<String> responseBuffer = new ArrayList<String>();

        if (sessionDir.exists()) {
            File[] files = sessionDir.listFiles();
            // Fix: listFiles() returns null when the path is not a directory;
            // the original for-loop would NPE.
            if (files != null) {
                for (File file : files) {
                    if (file.getName().contains(".json")) {
                        FileInputStream fin = null;
                        try {
                            fin = new FileInputStream(file);
                            responseBuffer.add(convertStreamToString(fin));
                        } catch (Exception e) {
                            RaygunClient.send(e);
                        } finally {
                            // Fix: null-guard, as in writePhotoMeta.
                            if (fin != null) {
                                try {
                                    fin.close();
                                } catch (Exception ex) {
                                    RaygunClient.send(ex);
                                }
                            }
                        }
                    }
                }
            }
        }

        return "[" + TextUtils.join(",", responseBuffer) + "]";
    }

    /**
     * Reads the whole stream as UTF-8 text, one line at a time, appending a
     * trailing newline per line. Returns "" on read failure (best effort —
     * a corrupt metadata file must not abort the read of the others).
     */
    private static String convertStreamToString(InputStream is) throws Exception {
        BufferedReader reader = null;
        try {
            // Fix: decode explicitly as UTF-8 — the files are written with
            // UTF-8 bytes in writePhotoMeta; the platform default may differ.
            reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
            return sb.toString();
        } catch (Exception e) {
            return "";
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (Exception e) {
                    RaygunClient.send(e);
                }
            }
        }
    }

    /**
     * Launches the camera on the UI thread.
     *
     * @return true when the launch was scheduled, false on failure
     */
    private boolean willOpenCamera() {
        try {
            cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    openCamera();
                }
            });
        } catch (IllegalArgumentException e) {
            RaygunClient.send(e);
            return false;
        }
        return true;
    }

    /** Starts the HappieCameraActivity in a new task. */
    private void openCamera() {
        Intent pictureIntent = new Intent(appContext,
                io.happie.cordovaCamera.HappieCameraActivity.class);
        pictureIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        appContext.startActivity(pictureIntent);
    }

    /**
     * Generates a thumbnail on a background thread.
     * args: [0] file name, [1] user id (optional), [2] job id (optional).
     */
    private void generateThumbnail(final JSONArray args)
            throws JSONException, java.io.IOException {
        final Context context = this.cordova.getActivity().getApplicationContext();
        cordova.getThreadPool().execute(new Runnable() {
            public void run() {
                try {
                    String name = args.getString(0);
                    String user = "";
                    String jnid = "";
                    try {
                        user = args.getString(1);
                        jnid = args.getString(2);
                    } catch (Exception e) {
                        // Optional args missing — keep the defaults, but log.
                        RaygunClient.send(e);
                    }
                    HappieCameraThumb thumbGen = new HappieCameraThumb();
                    thumbGen.createThumbAtPathWithName(name, user, jnid, context);
                } catch (Exception e) {
                    RaygunClient.send(e);
                }
            }
        });
    }

    /** Requests the CAMERA runtime permission. */
    private void getCamPermission(int requestCode) {
        cordova.requestPermission(this, requestCode, CAMERA);
    }

    /**
     * Permission dialog result: fail the pending callback on any denial,
     * otherwise proceed to open the camera.
     */
    public void onRequestPermissionResult(int requestCode, String[] permissions,
            int[] grantResults) throws JSONException {
        for (int r : grantResults) {
            if (r == PackageManager.PERMISSION_DENIED) {
                staticCallbackContext.sendPluginResult(
                        new PluginResult(PluginResult.Status.ERROR, "Permission Denied"));
                return;
            }
        }
        switch (requestCode) {
        case CAM_REQUEST_CODE:
            willOpenCamera();
            break;
        }
    }

    /** Called by the camera activity when the session ends successfully. */
    static void sessionFinished() {
        staticCallbackContext.success();
    }
}
package phosphor.test;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import de.ecspride.BaseClass;
import de.ecspride.BaseClass2;
import de.ecspride.DataStore;
import de.ecspride.Datacontainer;
import de.ecspride.General;
import de.ecspride.VarA;
import de.ecspride.VarB;
import edu.columbia.cs.psl.phosphor.runtime.Taint;
import edu.columbia.cs.psl.phosphor.runtime.Tainter;
import edu.columbia.cs.psl.phosphor.struct.TaintedWithObjTag;

/**
 * DroidBench-derived test suite for the Phosphor dynamic taint-tracking
 * runtime.
 *
 * Each test* method marks data as tainted via {@link #taintedString} and then
 * asserts (via Java asserts — run with -ea, under Phosphor instrumentation)
 * whether the taint did or did not propagate through a particular code
 * pattern: field/object sensitivity, exceptions, loops, implicit flows,
 * arrays, collections, reflection, static initializers.
 *
 * NOTE(review): the exact shape of every statement here (string concats,
 * array sizes, branch structure, even the apparent typos) IS the test
 * fixture — do not "clean up" this code.
 */
public class DroidBenchTest {

    /** Returns the taint tag attached to the first character of the string. */
    public static int getTaint(String description) {
        return Tainter.getTaint(description.toCharArray()[0]);
    }

    static int i = 0;

    /** Returns a copy of the string whose characters all carry taint tag 5. */
    public static String taintedString(String string) {
        return new String(Tainter.taintedCharArray(string.toCharArray(), 5));
    }

    /** Convenience source: a fixed tainted string. */
    public static String taintedString() {
        return taintedString(new String("abcdefghi"));
    }

    // Field sensitivity: taint on one field must not leak to a sibling field.
    static class TestFieldSensitivity1 {
        Datacontainer d1;

        void setTaint(Datacontainer data) {
            data.setDescription("abcd");
            data.setSecret(taintedString("abcdefg"));
        }

        void sendTaint() {
            // description was set from an untainted literal — must be clean.
            assert (getTaint(d1.getDescription()) == 0);
        }
    }

    static void testFieldSensitivity1() {
        TestFieldSensitivity1 t = new TestFieldSensitivity1();
        t.d1 = new Datacontainer();
        t.setTaint(t.d1);
        t.sendTaint();
    }

    static void testFieldSensitivity2() {
        Datacontainer d1 = new Datacontainer();
        d1.setDescription("abcd");
        d1.setSecret(taintedString("abcdefg"));
        // Tainted secret must not contaminate the untainted description.
        assert (getTaint(d1.getDescription()) == 0);
    }

    static void testFieldSensitivity3() {
        Datacontainer d1 = new Datacontainer();
        d1.setDescription("abcd");
        d1.setSecret(taintedString("abcdefg"));
        // The tainted value read back through the same field keeps its taint.
        assert (getTaint(d1.getSecret()) != 0);
    }

    static void testFieldSensitivity4() {
        Datacontainer d1 = new Datacontainer();
        d1.setDescription("abcd");
        d1.setDescription(taintedString("abcdefg"));
        assert (getTaint(d1.getDescription()) != 0);
    }

    // Taint must flow through a virtual call resolved at runtime (VarA branch
    // is always taken: a == 47).
    static void testInheritedObjects1() {
        int a = 46 + 1;
        General g;
        if (a == 47)
            g = new VarA();
        else
            g = new VarB();
        assert (getTaint(g.getInfo()) != 0);
    }

    // Object sensitivity: taint added to list1 must not appear in list2.
    static void testObjectSensitivity1() {
        LinkedList<String> list1 = new LinkedList<String>();
        LinkedList<String> list2 = new LinkedList<String>();
        list1.add(taintedString("abcd")); //source
        list2.add("123");
        assert (getTaint(list2.getFirst()) == 0);
    }

    // Strong update: overwriting with untainted values must clear the taint.
    static void testObjectSensitivity2() {
        String var;
        DataStore ds = new DataStore();
        String taintedString = taintedString("abcd");
        var = taintedString;
        ds.field = taintedString;
        var = "abc";
        ds.field = "def";
        assert (getTaint(var) == 0);
        assert (getTaint(ds.field) == 0);
    }

    // Taint survives an exception being thrown and caught.
    static void testExceptions1() {
        String imei = "";
        try {
            imei = taintedString("abcd");
            throw new RuntimeException();
        } catch (RuntimeException ex) {
            assert (getTaint(imei) != 0);
        }
    }

    // ArrayIndexOutOfBounds (length 7, index 32) aborts before the strong
    // update, so imei stays tainted in the handler.
    static void testExceptions2() {
        String imei = "";
        try {
            imei = taintedString("abcd");
            int[] arr = new int[(int) Math.sqrt(49)];
            if (arr[32] > 0)
                imei = "";
        } catch (RuntimeException ex) {
            assert (getTaint(imei) != 0);
        }
    }

    // Here the array is big enough (42 > 32): no exception, the catch block
    // (and its assert) never runs.
    static void testExceptions3() {
        String imei = "";
        try {
            imei = taintedString("abcd");
            int[] arr = new int[42];
            if (arr[32] > 0)
                imei = "";
        } catch (RuntimeException ex) {
            assert (getTaint(imei) != 0);
        }
    }

    // Taint carried inside the exception message itself.
    static void testExceptions4() {
        String imei = "";
        try {
            imei = taintedString("abcd");
            throw new RuntimeException(imei);
        } catch (RuntimeException ex) {
            assert (getTaint(ex.getMessage()) != 0);
        }
    }

    // Taint propagates through char-by-char string rebuilding.
    static void testLoopExample1() {
        String imei = taintedString("abcd");
        String obfuscated = "";
        for (char c : imei.toCharArray())
            obfuscated += c + "_";
        assert (getTaint(obfuscated) != 0);
    }

    // Same, but the rebuilding only happens on the last loop iteration.
    static void testLoopExample2() {
        String imei = taintedString("abcd");
        String obfuscated = "";
        for (int i = 0; i < 10; i++)
            if (i == 9)
                for (char c : imei.toCharArray())
                    obfuscated += c + "_";
        assert (getTaint(obfuscated) != 0);
    }

    // Taint through a ternary whose tainted arm is taken (a=33, b=76).
    static class SourceCodeSpecific1 {
        void doTest() {
            Set<String> phoneNumbers = new HashSet<String>();
            phoneNumbers.add("+49 123456");
            phoneNumbers.add("+49 654321");
            phoneNumbers.add("+49 111111");
            phoneNumbers.add("+49 222222");
            phoneNumbers.add("+49 333333");

            int a = 22 + 11;
            int b = 22 * 2 - 1 + a;

            String message = (a == b) ? "no taint" : taintedString("abcd"); //source
            sendSMS(phoneNumbers, message);
        }

        private void sendSMS(Set<String> numbers, String message) {
            for (String number : numbers) {
                assert (getTaint(message) != 0);
            }
        }
    }

    static void testSourceCodeSpecific1() {
        new SourceCodeSpecific1().doTest();
    }

    public static String im;

    // Taint visible inside a static initializer triggered after the source.
    static void testStaticInitialization1() {
        im = taintedString();
        new StaticInitClass1();
    }

    public static class StaticInitClass1 {
        static {
            assert (getTaint(im) != 0);
        }
    }

    public static String im2;

    // Taint written by a static initializer, checked by the caller.
    static void testStaticInitialization2() {
        new StaticInitClass2();
        assert (getTaint(im2) != 0);
    }

    public static class StaticInitClass2 {
        static {
            im2 = taintedString();
        }
    }

    // Branch is never taken (i == 47): nothing to assert at runtime.
    static void testUnreachableCode() {
        int i = 46 + 1;
        if (i < 47) {
            String s = taintedString();
            assert (getTaint(s) != 0);
        }
    }

    // Implicit flow through exceptional control flow (not tracked yet).
    static class ImplicitFlow4{
        public void doTest() {
            String password = taintedString();
            String username = "hanns";
            try{
                boolean passwordCorrect = lookup(username, password);
                assert (Tainter.getTaint(passwordCorrect) != 1);
            }catch(Exception ex){
                //should be a sink here
                ex.printStackTrace();
            }
            //should be a sink here
            assert(false); //We have no concept of exceptional control flow tainting yet
        }

        private boolean lookup(String username, String password) throws Exception{
            if(!username.equals("hanns"))
                throw new Exception("username not available");
            else if(username.equals("hanns") && !password.equals("superSecure"))
                return false;
            else
                return true;
        }
    }

    // Implicit flow leaked one bit at a time via instanceof dispatch on
    // objects carrying object-level taint tags.
    static class ImplicitFlow3 {
        public void doTest() {
            ArrayList arrayList = new ArrayList();
            LinkedList linkedList = new LinkedList();
            ((TaintedWithObjTag)arrayList).setPHOSPHOR_TAG(new Taint("arraylist tag"));
            ((TaintedWithObjTag)linkedList).setPHOSPHOR_TAG(new Taint("arraylist tag"));
            leakInformationBit(linkedList);
            leakInformationBit(arrayList);
            leakInformationBit(linkedList);
        }

        private void leakInformationBit(List list){
            if(list instanceof ArrayList)
            {
                boolean labeledWithCurrentTag = false;
                assert (Tainter.getTaint(labeledWithCurrentTag) != 1);
            }
            else if(list instanceof LinkedList)
            {
                boolean labeledWithCurrentTag = false;
                assert (Tainter.getTaint(labeledWithCurrentTag) != 1);
            }
        }
    }

    // Implicit flow: IMEI leaked through control-flow-dependent obfuscation.
    static class ImplicitFlow1 {
        public void doTest() {
            String imei = taintedString("0123456789");
            String obfuscatedIMEI = obfuscateIMEI(imei);
            writeToLog(obfuscatedIMEI);

            //hard to detect
            obfuscatedIMEI = reallyHardObfuscatedIMEI(imei);
            writeToLog(obfuscatedIMEI);
        }

        // Maps each digit to a letter purely via switch branches — the output
        // depends on the input only through control flow.
        private String obfuscateIMEI(String imei) {
            String result = "";

            for (char c : imei.toCharArray()) {
                switch (c) {
                case '0':
                    result += 'a';
                    break;

                case '1':
                    result += 'b';
                    break;

                case '2':
                    result += 'c';
                    break;

                case '3':
                    result += 'd';
                    break;

                case '4':
                    result += 'e';
                    break;

                case '5':
                    result += 'f';
                    break;

                case '6':
                    result += 'g';
                    break;

                case '7':
                    result += 'h';
                    break;

                case '8':
                    result += 'i';
                    break;

                case '9':
                    result += 'j';
                    break;

                default:
                    System.err.println("Problem in obfuscateIMEI for character: " + c);
                }
            }

            return result;
        }

        // NOTE(review): the "4142" entry (missing comma in the benchmark) and
        // the char[].toString() return (array identity, not contents) are in
        // the original DroidBench fixture — preserved intentionally.
        private String reallyHardObfuscatedIMEI(String imei) {
            //ASCII values for integer: 48-57
            Integer[] numbers = new Integer[] {
                    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
                    17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
                    32, 33, 34, 35, 36, 37, 38, 39, 40, 4142, 43, 44, 45, 46,
                    47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58
                };
            char[] imeiAsChar = imei.toCharArray();
            char[] newOldIMEI = new char[imeiAsChar.length];

            for (int i = 0; i < imeiAsChar.length; i++) {
                newOldIMEI[i] = Character.forDigit(numbers[(int) imeiAsChar[i]], 10);
            }

            return newOldIMEI.toString();
        }

        private void writeToLog(String message) {
            assert (getTaint(message) != 0);
        }
    }

    static void testImplicitFlow1() {
        new ImplicitFlow1().doTest();
    }

    static boolean passwordCorrect;

    // Implicit flow: the boolean is set only on the tainted-equals branch.
    static void testImplicitFlow2() {
        String userInputPassword = taintedString("superSecure");

        if (userInputPassword.equals("superSecure"))
            passwordCorrect = true;

        assert (Tainter.getTaint(passwordCorrect) == 1);
    }

    static void testImplicitFlow3() {
        new ImplicitFlow3().doTest();
    }

    static void testImplicitFlow4() {
        new ImplicitFlow4().doTest();
    }

    // Per-slot array taint: neighbors of a tainted slot stay clean.
    static void testArrayAccess1() {
        String[] arrayData = new String[3];
        arrayData[0] = "abcd";
        arrayData[1] = taintedString();
        arrayData[2] = "abcd";
        assert (getTaint(arrayData[2]) == 0);
    }

    // calculateIndex() evaluates to 4, an untainted slot.
    static void testArrayAccess2() {
        String[] arrayData = new String[10];
        arrayData[0] = "abcd";
        arrayData[4] = "abcd";
        arrayData[5] = taintedString();
        arrayData[2] = "abcd";
        assert (getTaint(arrayData[calculateIndex()]) == 0);
    }

    // 1 -> 2 -> 10 -> 0 -> 4
    private static int calculateIndex() {
        int index = 1;
        index++;
        index *= 5;
        index = index % 10;
        index += 4;

        return index;
    }

    // Map values keep per-entry taint.
    static void testHashMapAccess1() {
        Map<String, String> map = new HashMap<String, String>();
        map.put("tainted", taintedString());
        map.put("untainted", "abcd");
        assert (getTaint(map.get("untainted")) == 0);
        assert (getTaint(map.get("tainted")) != 0);
    }

    // List elements keep per-slot taint.
    static void testListAccess1() {
        LinkedList<String> list = new LinkedList<String>();
        list.add("b");
        list.add(taintedString());
        list.add("c");
        list.add("d");
        assert (getTaint(list.getFirst()) == 0);
        assert (getTaint(list.get(0)) == 0);
        assert (getTaint(list.get(1)) != 0);
    }

    // Taint through a field of a reflectively instantiated class.
    static void testReflectionTest1() {
        try {
            BaseClass bc = (BaseClass) Class.forName("de.ecspride.ConcreteClass")
                                            .newInstance();
            bc.imei = taintedString();
            assert (getTaint(bc.imei) != 0);
        } catch (InstantiationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    // Taint read back through a virtual method on the reflective instance.
    static void testReflectionTest2() {
        try {
            BaseClass bc = (BaseClass) Class.forName("de.ecspride.ConcreteClass")
                                            .newInstance();
            bc.imei = taintedString();
            assert (getTaint(bc.foo()) != 0);
        } catch (InstantiationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    // Taint through Method.invoke with a reflectively assembled method name.
    static void testReflectionTest3() {
        try {
            String imei = taintedString();
            Class c = Class.forName("de.ecspride.ReflectiveClass");
            Object o = c.newInstance();
            Method m = c.getMethod("setIme" + "i", String.class);
            m.invoke(o, imei);

            Method m2 = c.getMethod("getImei");
            String s = (String) m2.invoke(o);
            assert (getTaint(s) != 0);
        } catch (InstantiationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (NoSuchMethodException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalArgumentException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    // Source and sink both live in the reflectively loaded class.
    static void testReflectionTest4() {
        try {
            BaseClass2 bc = (BaseClass2) Class.forName("de.ecspride.ConcreteClass2")
                                              .newInstance();
            String s = bc.foo();
            bc.bar(s);
        } catch (InstantiationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    /**
     * Reflectively runs every static method named test* in this class.
     * Assertion failures surface as InvocationTargetException stack traces.
     */
    public static void main(String[] args) {
        for (Method m : DroidBenchTest.class.getDeclaredMethods()) {
            if (m.getName().startsWith("test")) {
                System.out.println(m.getName());

                try {
                    m.invoke(null);
                } catch (IllegalAccessException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                } catch (IllegalArgumentException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                } catch (InvocationTargetException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce; import java.io.IOException; import java.security.PrivilegedExceptionAction; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.TaskCompletionEvent; import org.apache.hadoop.mapreduce.task.JobContextImpl; /** * The job submitter's view of the Job. It allows the user to configure the * job, submit it, control its execution, and query the state. The set methods * only work until the job is submitted, afterwards they will throw an * IllegalStateException. */ public class Job extends JobContextImpl implements JobContext { public static enum JobState {DEFINE, RUNNING}; private JobState state = JobState.DEFINE; private JobClient jobClient; private RunningJob info; /** * Creates a new {@link Job} * A Job will be created with a generic {@link Configuration}. 
* * @return the {@link Job} * @throws IOException */ public static Job getInstance() throws IOException { // create with a null Cluster return getInstance(new Configuration()); } /** * Creates a new {@link Job} with a given {@link Configuration}. * * The <code>Job</code> makes a copy of the <code>Configuration</code> so * that any necessary internal modifications do not reflect on the incoming * parameter. * * @param conf the {@link Configuration} * @return the {@link Job} * @throws IOException */ public static Job getInstance(Configuration conf) throws IOException { // create with a null Cluster JobConf jobConf = new JobConf(conf); return new Job(jobConf); } /** * Creates a new {@link Job} with a given {@link Configuration} * and a given jobName. * * The <code>Job</code> makes a copy of the <code>Configuration</code> so * that any necessary internal modifications do not reflect on the incoming * parameter. * * @param conf the {@link Configuration} * @param jobName the job instance's name * @return the {@link Job} * @throws IOException */ public static Job getInstance(Configuration conf, String jobName) throws IOException { // create with a null Cluster Job result = getInstance(conf); result.setJobName(jobName); return result; } public Job() throws IOException { this(new Configuration()); } public Job(Configuration conf) throws IOException { super(conf, null); } public Job(Configuration conf, String jobName) throws IOException { this(conf); setJobName(jobName); } JobClient getJobClient() { return jobClient; } private void ensureState(JobState state) throws IllegalStateException { if (state != this.state) { throw new IllegalStateException("Job in state "+ this.state + " instead of " + state); } if (state == JobState.RUNNING && jobClient == null) { throw new IllegalStateException("Job in state " + JobState.RUNNING + " however jobClient is not initialized!"); } } /** * Set the number of reduce tasks for the job. 
* @param tasks the number of reduce tasks * @throws IllegalStateException if the job is submitted */ public void setNumReduceTasks(int tasks) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setNumReduceTasks(tasks); } /** * Set the current working directory for the default file system. * * @param dir the new current working directory. * @throws IllegalStateException if the job is submitted */ public void setWorkingDirectory(Path dir) throws IOException { ensureState(JobState.DEFINE); conf.setWorkingDirectory(dir); } /** * Set the {@link InputFormat} for the job. * @param cls the <code>InputFormat</code> to use * @throws IllegalStateException if the job is submitted */ public void setInputFormatClass(Class<? extends InputFormat> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(INPUT_FORMAT_CLASS_ATTR, cls, InputFormat.class); } /** * Set the {@link OutputFormat} for the job. * @param cls the <code>OutputFormat</code> to use * @throws IllegalStateException if the job is submitted */ public void setOutputFormatClass(Class<? extends OutputFormat> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(OUTPUT_FORMAT_CLASS_ATTR, cls, OutputFormat.class); } /** * Set the {@link Mapper} for the job. * @param cls the <code>Mapper</code> to use * @throws IllegalStateException if the job is submitted */ public void setMapperClass(Class<? extends Mapper> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(MAP_CLASS_ATTR, cls, Mapper.class); } /** * Set the Jar by finding where a given class came from. * @param cls the example class */ public void setJarByClass(Class<?> cls) { conf.setJarByClass(cls); } /** * Get the pathname of the job's jar. * @return the pathname */ public String getJar() { return conf.getJar(); } /** * Set the combiner class for the job. 
* @param cls the combiner to use * @throws IllegalStateException if the job is submitted */ public void setCombinerClass(Class<? extends Reducer> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(COMBINE_CLASS_ATTR, cls, Reducer.class); } /** * Set the {@link Reducer} for the job. * @param cls the <code>Reducer</code> to use * @throws IllegalStateException if the job is submitted */ public void setReducerClass(Class<? extends Reducer> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(REDUCE_CLASS_ATTR, cls, Reducer.class); } /** * Set the {@link Partitioner} for the job. * @param cls the <code>Partitioner</code> to use * @throws IllegalStateException if the job is submitted */ public void setPartitionerClass(Class<? extends Partitioner> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setClass(PARTITIONER_CLASS_ATTR, cls, Partitioner.class); } /** * Set the key class for the map output data. This allows the user to * specify the map output key class to be different than the final output * value class. * * @param theClass the map output key class. * @throws IllegalStateException if the job is submitted */ public void setMapOutputKeyClass(Class<?> theClass ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setMapOutputKeyClass(theClass); } /** * Set the value class for the map output data. This allows the user to * specify the map output value class to be different than the final output * value class. * * @param theClass the map output value class. * @throws IllegalStateException if the job is submitted */ public void setMapOutputValueClass(Class<?> theClass ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setMapOutputValueClass(theClass); } /** * Set the key class for the job output data. * * @param theClass the key class for the job output data. 
* @throws IllegalStateException if the job is submitted */ public void setOutputKeyClass(Class<?> theClass ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setOutputKeyClass(theClass); } /** * Turn speculative execution on or off for this job. * * @param speculativeExecution <code>true</code> if speculative execution * should be turned on, else <code>false</code>. */ public void setSpeculativeExecution(boolean speculativeExecution) { ensureState(JobState.DEFINE); conf.setSpeculativeExecution(speculativeExecution); } /** * Turn speculative execution on or off for this job for map tasks. * * @param speculativeExecution <code>true</code> if speculative execution * should be turned on for map tasks, * else <code>false</code>. */ public void setMapSpeculativeExecution(boolean speculativeExecution) { ensureState(JobState.DEFINE); conf.setMapSpeculativeExecution(speculativeExecution); } /** * Turn speculative execution on or off for this job for reduce tasks. * * @param speculativeExecution <code>true</code> if speculative execution * should be turned on for reduce tasks, * else <code>false</code>. */ public void setReduceSpeculativeExecution(boolean speculativeExecution) { ensureState(JobState.DEFINE); conf.setReduceSpeculativeExecution(speculativeExecution); } /** * Set the value class for job outputs. * * @param theClass the value class for job outputs. * @throws IllegalStateException if the job is submitted */ public void setOutputValueClass(Class<?> theClass ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setOutputValueClass(theClass); } /** * Define the comparator that controls how the keys are sorted before they * are passed to the {@link Reducer}. * @param cls the raw comparator * @throws IllegalStateException if the job is submitted * @see #setCombinerKeyGroupingComparatorClass(Class) */ public void setSortComparatorClass(Class<? 
extends RawComparator> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setOutputKeyComparatorClass(cls); } /** * Define the comparator that controls which keys are grouped together * for a single call to combiner, * {@link Reducer#reduce(Object, Iterable, * org.apache.hadoop.mapreduce.Reducer.Context)} * * @param cls the raw comparator to use * @throws IllegalStateException if the job is submitted */ public void setCombinerKeyGroupingComparatorClass( Class<? extends RawComparator> cls) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setCombinerKeyGroupingComparator(cls); } /** * Define the comparator that controls which keys are grouped together * for a single call to * {@link Reducer#reduce(Object, Iterable, * org.apache.hadoop.mapreduce.Reducer.Context)} * @param cls the raw comparator to use * @throws IllegalStateException if the job is submitted * @see #setCombinerKeyGroupingComparatorClass(Class) */ public void setGroupingComparatorClass(Class<? extends RawComparator> cls ) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setOutputValueGroupingComparator(cls); } /** * Set the user-specified job name. * * @param name the job's new name. * @throws IllegalStateException if the job is submitted */ public void setJobName(String name) throws IllegalStateException { ensureState(JobState.DEFINE); conf.setJobName(name); } /** * Set the boolean property for specifying which classpath takes precedence - * the user's one or the system one, when the tasks are launched * @param value pass true if user's classes should take precedence */ public void setUserClassesTakesPrecedence(boolean value) { ensureState(JobState.DEFINE); conf.setUserClassesTakesPrecedence(value); } /** * Get the URL where some job progress information will be displayed. * * @return the URL where some job progress information will be displayed. 
 */
public String getTrackingURL() {
  // All accessors below require RUNNING state: ensureState(JobState.RUNNING)
  // rejects calls made before submit() (per the DEFINE-state setters' javadoc,
  // ensureState signals state violations with IllegalStateException — TODO confirm).
  ensureState(JobState.RUNNING);
  return info.getTrackingURL();
}

/**
 * Get the <i>progress</i> of the job's setup, as a float between 0.0
 * and 1.0. When the job setup is completed, the function returns 1.0.
 *
 * @return the progress of the job's setup.
 * @throws IOException
 */
public float setupProgress() throws IOException {
  ensureState(JobState.RUNNING);
  // Delegates to the running-job handle obtained from submit().
  return info.setupProgress();
}

/**
 * Get the <i>progress</i> of the job's map-tasks, as a float between 0.0
 * and 1.0. When all map tasks have completed, the function returns 1.0.
 *
 * @return the progress of the job's map-tasks.
 * @throws IOException
 */
public float mapProgress() throws IOException {
  ensureState(JobState.RUNNING);
  return info.mapProgress();
}

/**
 * Get the <i>progress</i> of the job's reduce-tasks, as a float between 0.0
 * and 1.0. When all reduce tasks have completed, the function returns 1.0.
 *
 * @return the progress of the job's reduce-tasks.
 * @throws IOException
 */
public float reduceProgress() throws IOException {
  ensureState(JobState.RUNNING);
  return info.reduceProgress();
}

/**
 * Check if the job is finished or not.
 * This is a non-blocking call.
 *
 * @return <code>true</code> if the job is complete, else <code>false</code>.
 * @throws IOException
 */
public boolean isComplete() throws IOException {
  ensureState(JobState.RUNNING);
  return info.isComplete();
}

/**
 * Check if the job completed successfully.
 *
 * @return <code>true</code> if the job succeeded, else <code>false</code>.
 * @throws IOException
 */
public boolean isSuccessful() throws IOException {
  ensureState(JobState.RUNNING);
  return info.isSuccessful();
}

/**
 * Kill the running job. Blocks until all job tasks have been
 * killed as well. If the job is no longer running, it simply returns.
 *
 * @throws IOException
 */
public void killJob() throws IOException {
  ensureState(JobState.RUNNING);
  info.killJob();
}

/**
 * Get events indicating completion (success/failure) of component tasks.
 *
 * @param startFrom index to start fetching events from
 * @return an array of {@link TaskCompletionEvent}s
 * @throws IOException
 */
public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom
                                                     ) throws IOException {
  ensureState(JobState.RUNNING);
  return info.getTaskCompletionEvents(startFrom);
}

/**
 * Kill indicated task attempt.
 *
 * @param taskId the id of the task to be terminated.
 * @throws IOException
 */
public void killTask(TaskAttemptID taskId) throws IOException {
  ensureState(JobState.RUNNING);
  // The new-API TaskAttemptID is downgraded to the old mapred id expected by
  // the client. The boolean selects kill (false) vs fail (true) semantics —
  // contrast with failTask below, which passes true.
  info.killTask(org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId),
                false);
}

/**
 * Fail indicated task attempt.
 *
 * @param taskId the id of the task to be terminated.
 * @throws IOException
 */
public void failTask(TaskAttemptID taskId) throws IOException {
  ensureState(JobState.RUNNING);
  // Same underlying call as killTask, but 'true' marks the attempt as failed
  // rather than killed.
  info.killTask(org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId),
                true);
}

/**
 * Gets the counters for this job.
 *
 * @return the counters for this job; a fresh Counters wrapper is built from
 *         the running job's counters on every call.
 * @throws IOException
 */
public Counters getCounters() throws IOException {
  ensureState(JobState.RUNNING);
  return new Counters(info.getCounters());
}

// Guard used by setUseNewAPI(): a configuration attribute must be unset when
// it belongs to the API mode NOT in use; otherwise the configuration is
// ambiguous and the job is rejected.
private void ensureNotSet(String attr, String msg) throws IOException {
  if (conf.get(attr) != null) {
    throw new IOException(attr + " is incompatible with " + msg + " mode.");
  }
}

/**
 * Sets the flag that will allow the JobTracker to cancel the HDFS delegation
 * tokens upon job completion. Defaults to true.
 */
public void setCancelDelegationTokenUponJobCompletion(boolean value) {
  // Only legal before submission (DEFINE state).
  ensureState(JobState.DEFINE);
  conf.setBoolean(JOB_CANCEL_DELEGATION_TOKEN, value);
}

/**
 * Default to the new APIs unless they are explicitly set or the old mapper or
 * reduce attributes are used.
 * @throws IOException if the configuration is inconsistent
 */
private void setUseNewAPI() throws IOException {
  int numReduces = conf.getNumReduceTasks();
  String oldMapperClass = "mapred.mapper.class";
  String oldReduceClass = "mapred.reducer.class";
  // Pick the new mapper API by default, unless the old-API mapper class was
  // explicitly configured.
  conf.setBooleanIfUnset("mapred.mapper.new-api",
                         conf.get(oldMapperClass) == null);
  if (conf.getUseNewMapper()) {
    // New map API chosen: none of the old-API map-side attributes may be set.
    String mode = "new map API";
    ensureNotSet("mapred.input.format.class", mode);
    ensureNotSet(oldMapperClass, mode);
    if (numReduces != 0) {
      // With reducers, the partitioner routes map output — check it instead
      // of the output format, which the reduce side owns.
      ensureNotSet("mapred.partitioner.class", mode);
    } else {
      // Map-only job: the map side writes the output directly.
      ensureNotSet("mapred.output.format.class", mode);
    }
  } else {
    // Old map API chosen: the mirror-image check — no new-API attributes.
    // NOTE(review): "compatability" [sic] is part of the runtime error message
    // produced via ensureNotSet and is deliberately left unchanged here.
    String mode = "map compatability";
    ensureNotSet(INPUT_FORMAT_CLASS_ATTR, mode);
    ensureNotSet(MAP_CLASS_ATTR, mode);
    if (numReduces != 0) {
      ensureNotSet(PARTITIONER_CLASS_ATTR, mode);
    } else {
      ensureNotSet(OUTPUT_FORMAT_CLASS_ATTR, mode);
    }
  }
  if (numReduces != 0) {
    // Reduce side gets the same treatment, but only when reducers exist.
    conf.setBooleanIfUnset("mapred.reducer.new-api",
                           conf.get(oldReduceClass) == null);
    if (conf.getUseNewReducer()) {
      String mode = "new reduce API";
      ensureNotSet("mapred.output.format.class", mode);
      ensureNotSet(oldReduceClass, mode);
    } else {
      String mode = "reduce compatability";
      ensureNotSet(OUTPUT_FORMAT_CLASS_ATTR, mode);
      ensureNotSet(REDUCE_CLASS_ATTR, mode);
    }
  }
}

/**
 * Submit the job to the cluster and return immediately.
 * @throws IOException
 */
public void submit() throws IOException, InterruptedException,
                            ClassNotFoundException {
  // Submission is only legal once, from DEFINE state.
  ensureState(JobState.DEFINE);
  // Resolve old- vs new-API configuration before anything is sent to the cluster.
  setUseNewAPI();

  // Connect to the JobTracker and submit the job
  connect();
  info = jobClient.submitJobInternal(conf);
  super.setJobID(info.getID());
  // From here on the RUNNING-state accessors (progress, counters, kill) are usable.
  state = JobState.RUNNING;
}

/**
 * Open a connection to the JobTracker
 * @throws IOException
 * @throws InterruptedException
 */
private void connect() throws IOException, InterruptedException {
  // The JobClient is created inside doAs so that the connection is established
  // with this job's user credentials (ugi), not the current thread's default.
  ugi.doAs(new PrivilegedExceptionAction<Object>() {
    public Object run() throws IOException {
      jobClient = new JobClient((JobConf) getConfiguration());
      return null;
    }
  });
}

/**
 * Submit the job to the cluster and wait for it to finish.
 * @param verbose print the progress to the user
 * @return true if the job succeeded
 * @throws IOException thrown if the communication with the
 *         <code>JobTracker</code> is lost
 */
public boolean waitForCompletion(boolean verbose
                                 ) throws IOException, InterruptedException,
                                          ClassNotFoundException {
  // Submit lazily if the caller has not done so already.
  if (state == JobState.DEFINE) {
    submit();
  }
  if (verbose) {
    // Polls and prints progress/counters until the job finishes.
    jobClient.monitorAndPrintJob(conf, info);
  } else {
    // Silent blocking wait.
    info.waitForCompletion();
  }
  return isSuccessful();
}
}
/* Copyright (c) 2013-2016 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Gabriel Roldan (Boundless) - initial implementation */ package org.locationtech.geogig.data; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.jdt.annotation.Nullable; import org.geotools.feature.GeometryAttributeImpl; import org.geotools.feature.type.AttributeDescriptorImpl; import org.geotools.feature.type.Types; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.util.Converters; import org.geotools.util.Utilities; import org.locationtech.geogig.model.RevFeature; import org.locationtech.geogig.model.RevFeatureType; import org.opengis.feature.GeometryAttribute; import org.opengis.feature.IllegalAttributeException; import org.opengis.feature.Property; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.feature.type.AttributeType; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.feature.type.GeometryType; import org.opengis.feature.type.Name; import org.opengis.feature.type.PropertyDescriptor; import org.opengis.filter.identity.FeatureId; import org.opengis.filter.identity.Identifier; import org.opengis.geometry.BoundingBox; import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; import 
com.vividsolutions.jts.geom.GeometryFactory; /** */ class GeogigSimpleFeature implements SimpleFeature { static final class State { private static final Object NULL = new Object(); private final RevFeature feature; private final Object[] values; State(RevFeature feature) { this.feature = feature; this.values = new Object[feature.size()]; Arrays.fill(values, NULL); } public void set(int index, Object value) { values[index] = value; } public Object get(int index) { Object v = values[index]; if (v == NULL) { v = feature.get(index).orNull(); values[index] = v; } return v; } public Geometry get(int index, GeometryFactory gf) { Object v = values[index]; if (v == NULL) { v = feature.get(index, gf).orNull(); values[index] = v; } return (Geometry) v; } public void getBounds(Envelope bounds) { feature.forEach((v) -> { if (v instanceof Geometry) bounds.expandToInclude(((Geometry) v).getEnvelopeInternal()); }); } } private final State state; private final FeatureId id; private SimpleFeatureType featureType; /** * The attribute name -> position index */ private Map<String, Integer> nameToRevTypeIndex; /** * The set of user data attached to the feature (lazily created) */ private Map<Object, Object> userData; /** * The set of user data attached to each attribute (lazily created) */ private Map<Object, Object>[] attributeUserData; private final int defaultGeomIndex; // // private final boolean defaultGeomIsPoint; private @Nullable GeometryFactory geomFac; GeogigSimpleFeature(RevFeature feature, SimpleFeatureType featureType, FeatureId id, Map<String, Integer> nameToRevTypeInded) { this(feature, featureType, id, nameToRevTypeInded, null); } GeogigSimpleFeature(RevFeature feature, SimpleFeatureType featureType, FeatureId id, Map<String, Integer> nameToRevTypeInded, @Nullable GeometryFactory geomFac) { this.id = id; this.featureType = featureType; this.geomFac = geomFac; this.state = new GeogigSimpleFeature.State(feature); this.nameToRevTypeIndex = nameToRevTypeInded; Integer 
defaultGeomIndex = nameToRevTypeInded.get(null); if (defaultGeomIndex == null) { this.defaultGeomIndex = -1; // defaultGeomIsPoint = false; } else { this.defaultGeomIndex = defaultGeomIndex.intValue(); // Class<?> binding = featureType.getGeometryDescriptor().getType().getBinding(); // defaultGeomIsPoint = Point.class.isAssignableFrom(binding); } } @Override public FeatureId getIdentifier() { return id; } @Override public String getID() { return id.getID(); } @Override public Object getAttribute(int index) throws IndexOutOfBoundsException { // @TODO: reoptimize // if (node != null && index == defaultGeomIndex && defaultGeomIsPoint // && (resolvedValues == null || resolvedValues instanceof ImmutableList)) { // Envelope e = new Envelope(); // node.expand(e); // if (e.isNull()) { // return null; // } // return DEFAULT_GEOM_FACTORY.createPoint(new Coordinate(e.getMinX(), e.getMinY())); // } Object value; State state = this.state; if (index == defaultGeomIndex && geomFac != null) { value = state.get(index, geomFac); } else { value = state.get(index); } return value; } @Override public Object getAttribute(String name) { Integer index = nameToRevTypeIndex.get(name); if (index == null) { return null; } return getAttribute(index.intValue()); } @Override public Object getAttribute(Name name) { return getAttribute(name.getLocalPart()); } @Override public int getAttributeCount() { return featureType.getAttributeCount(); } @Override public List<Object> getAttributes() { final int attributeCount = getAttributeCount(); List<Object> atts = new ArrayList<Object>(attributeCount); for (int i = 0; i < attributeCount; i++) { atts.add(getAttribute(i)); } return atts; } @Override public Object getDefaultGeometry() { // should be specified in the index as the default key (null) Integer idx = nameToRevTypeIndex.get(null); Object defaultGeometry = idx == null ? null : getAttribute(idx.intValue()); // not found? do we have a default geometry at all? 
if (defaultGeometry == null) { GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor(); if (geometryDescriptor != null) { Integer defaultGeomIndex = nameToRevTypeIndex .get(geometryDescriptor.getName().getLocalPart()); defaultGeometry = getAttribute(defaultGeomIndex.intValue()); } } return defaultGeometry; } @Override public SimpleFeatureType getFeatureType() { return featureType; } @Override public SimpleFeatureType getType() { return featureType; } @Override public void setAttribute(int index, Object value) throws IndexOutOfBoundsException { // first do conversion Class<?> binding = getFeatureType().getDescriptor(index).getType().getBinding(); Object converted = Converters.convert(value, binding); if (converted == null && value != null) { throw new IllegalArgumentException( String.format("Unable to convert value to %s: %s", binding.getName(), value)); } if (converted != null && !binding.isAssignableFrom(converted.getClass())) { throw new IllegalArgumentException(String.format("%s is not assignable to %s: %s", converted.getClass().getName(), binding.getName(), value)); } // finally set the value into the feature state.set(index, converted); } @Override public void setAttribute(String name, Object value) { final Integer revTypeIndex = nameToRevTypeIndex.get(name); if (revTypeIndex == null) { throw new IllegalAttributeException(null, "Unknown attribute " + name); } setAttribute(revTypeIndex.intValue(), value); } @Override public void setAttribute(Name name, Object value) { setAttribute(name.getLocalPart(), value); } @Override public void setAttributes(List<Object> values) { for (int i = 0; i < values.size(); i++) { setAttribute(i, values.get(i)); } } @Override public void setAttributes(Object[] values) { setAttributes(Arrays.asList(values)); } @Override public void setDefaultGeometry(Object geometry) { Preconditions.checkArgument(geometry == null || geometry instanceof Geometry); Integer geometryIndex = nameToRevTypeIndex.get(null); if 
(geometryIndex != null) { setAttribute(geometryIndex.intValue(), geometry); } } @Override public BoundingBox getBounds() { CoordinateReferenceSystem crs = featureType.getCoordinateReferenceSystem(); Envelope bounds = ReferencedEnvelope.create(crs); state.getBounds(bounds); // { // if (node == null) { // Optional<Object> o; // List<Optional<Object>> values = getValues(); // for (int i = 0; i < values.size(); i++) { // o = values.get(i); // if (o.isPresent() && o.get() instanceof Geometry) { // Geometry g = (Geometry) o.get(); // // TODO: check userData for crs... and ensure its of the same // // crs as the feature type // if (bounds.isNull()) { // bounds.init(JTS.bounds(g, crs)); // } else { // bounds.expandToInclude(JTS.bounds(g, crs)); // } // } // } // } else { // node.expand(bounds); // } // } return (BoundingBox) bounds; } @Override public GeometryAttribute getDefaultGeometryProperty() { GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor(); GeometryAttribute geometryAttribute = null; if (geometryDescriptor != null) { Object defaultGeometry = getDefaultGeometry(); geometryAttribute = new GeometryAttributeImpl(defaultGeometry, geometryDescriptor, null); } return geometryAttribute; } @Override public void setDefaultGeometryProperty(GeometryAttribute geometryAttribute) { if (geometryAttribute == null) { setDefaultGeometry(null); } else { setDefaultGeometry(geometryAttribute.getValue()); } } @Override public Collection<Property> getProperties() { return new AttributeList(); } @Override public Collection<Property> getProperties(Name name) { return getProperties(name.getLocalPart()); } @Override public Collection<Property> getProperties(String name) { final Integer idx = nameToRevTypeIndex.get(name); if (idx != null) { // cast temporarily to a plain collection to avoid type problems with generics Collection<Property> c = Collections.singleton((Property) new Attribute(idx)); return c; } else { return Collections.emptyList(); } } @Override public 
Property getProperty(Name name) { return getProperty(name.getLocalPart()); } @Override public Property getProperty(String name) { AttributeDescriptor descriptor = featureType.getDescriptor(name); if (descriptor == null) { return null; } else { Integer index = nameToRevTypeIndex.get(name).intValue(); if (descriptor instanceof GeometryDescriptor) { Object value = getAttribute(index); return new GeometryAttributeImpl(value, (GeometryDescriptor) descriptor, null); } else { return new Attribute(index); } } } @Override public Collection<? extends Property> getValue() { return getProperties(); } @Override public void setValue(Collection<Property> values) { int index = 0; for (Property p : values) { setAttribute(index, p.getValue()); index++; } } @SuppressWarnings("unchecked") @Override public void setValue(Object newValue) { setValue((Collection<Property>) newValue); } /** * @see org.opengis.feature.Attribute#getDescriptor() */ @Override public AttributeDescriptor getDescriptor() { return new AttributeDescriptorImpl(featureType, featureType.getName(), 0, Integer.MAX_VALUE, true, null); } /** * @return same name than this feature's {@link SimpleFeatureType} * @see org.opengis.feature.Property#getName() */ @Override public Name getName() { return featureType.getName(); } @Override public boolean isNillable() { return true; } @Override public Map<Object, Object> getUserData() { if (userData == null) { userData = Maps.newHashMap(); } return userData; } /** * returns a unique code for this feature * * @return A unique int */ public int hashCode() { return id.hashCode() * featureType.hashCode(); } /** * override of equals. Returns if the passed in object is equal to this. * * @param obj the Object to test for equality. * * @return <code>true</code> if the object is equal, <code>false</code> otherwise. 
*/ @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj == this) { return true; } if (!(obj instanceof GeogigSimpleFeature)) { return false; } GeogigSimpleFeature feat = (GeogigSimpleFeature) obj; if (!id.equals(feat.getIdentifier())) { return false; } if (!feat.getFeatureType().equals(featureType)) { return false; } for (int i = 0, ii = getAttributeCount(); i < ii; i++) { Object otherAtt = feat.getAttribute(i); if (!Objects.equal(otherAtt, getAttribute(i))) { return false; } } return true; } /** * Live collection backed directly on the value array */ class AttributeList extends AbstractList<Property> { @Override public Property get(int index) { AttributeDescriptor descriptor = featureType.getDescriptor(index); if (descriptor instanceof GeometryDescriptor) { return new SimpleGeometryAttribute(index); } return new Attribute(index); } @Override public Attribute set(int index, Property element) { setAttribute(index, element.getValue()); return null; } @Override public int size() { return getAttributeCount(); } } @Override public String toString() { StringBuilder sb = new StringBuilder(getClass().getSimpleName()); sb.append(getType().getName().getLocalPart()); sb.append('='); sb.append(getValue()); return sb.toString(); } /** * Attribute that delegates directly to the value array */ class Attribute implements org.opengis.feature.Attribute { int index; Attribute(int index) { this.index = index; } @Override public Identifier getIdentifier() { return null; } @Override public AttributeDescriptor getDescriptor() { return featureType.getDescriptor(index); } @Override public AttributeType getType() { return featureType.getType(index); } @Override public Name getName() { return getDescriptor().getName(); } @SuppressWarnings("unchecked") @Override public Map<Object, Object> getUserData() { // lazily create the user data holder if (attributeUserData == null) attributeUserData = new HashMap[getAttributeCount()]; // lazily create the attribute 
user data if (attributeUserData[index] == null) attributeUserData[index] = new HashMap<Object, Object>(); return attributeUserData[index]; } @Override public Object getValue() { return getAttribute(this.index); } @Override public boolean isNillable() { return getDescriptor().isNillable(); } @Override public void setValue(Object newValue) { setAttribute(this.index, newValue); } /** * Override of hashCode; uses descriptor name to agree with AttributeImpl * * @return hashCode for this object. */ @Override public int hashCode() { Object value = getValue(); return 37 * getDescriptor().hashCode() + (37 * (value == null ? 0 : value.hashCode())); } /** * Override of equals. * * @param other the object to be tested for equality. * * @return whether other is equal to this attribute Type. */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Attribute)) { return false; } Attribute other = (Attribute) obj; if (!Utilities.equals(getDescriptor(), other.getDescriptor())) { return false; } if (!Utilities.deepEquals(getValue(), other.getValue())) { return false; } return Utilities.equals(getIdentifier(), other.getIdentifier()); } @Override public void validate() { Types.validate(getDescriptor(), getValue()); } @Override public String toString() { StringBuilder sb = new StringBuilder(getClass().getSimpleName()).append(": "); sb.append(getDescriptor().getName().getLocalPart()); if (!getDescriptor().getName().getLocalPart() .equals(getDescriptor().getType().getName().getLocalPart()) || id != null) { sb.append('<'); sb.append(getDescriptor().getType().getName().getLocalPart()); if (id != null) { sb.append(" id="); sb.append(id); } sb.append('>'); } sb.append('='); sb.append(getValue()); return sb.toString(); } } class SimpleGeometryAttribute extends Attribute implements GeometryAttribute { SimpleGeometryAttribute(int index) { super(index); } @Override public GeometryType getType() { return (GeometryType) super.getType(); } @Override 
public GeometryDescriptor getDescriptor() { return (GeometryDescriptor) super.getDescriptor(); } @Override public BoundingBox getBounds() { ReferencedEnvelope bounds = new ReferencedEnvelope( featureType.getCoordinateReferenceSystem()); Object value = getAttribute(index); if (value instanceof Geometry) { bounds.init(((Geometry) value).getEnvelopeInternal()); } return bounds; } @Override public void setBounds(BoundingBox bounds) { // do nothing, this property is strictly derived. Shall throw unsupported operation // exception? } @Override public int hashCode() { return 17 * super.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof SimpleGeometryAttribute)) { return false; } return super.equals(obj); } } public static Map<String, Integer> buildAttNameToRevTypeIndex(RevFeatureType revType) { List<PropertyDescriptor> sortedDescriptors = revType.descriptors(); Map<String, Integer> typeAttNameToRevTypeIndex = Maps.newHashMap(); final GeometryDescriptor defaultGeometry = ((SimpleFeatureType) revType.type()) .getGeometryDescriptor(); for (int revFeatureIndex = 0; revFeatureIndex < sortedDescriptors .size(); revFeatureIndex++) { PropertyDescriptor prop = sortedDescriptors.get(revFeatureIndex); typeAttNameToRevTypeIndex.put(prop.getName().getLocalPart(), Integer.valueOf(revFeatureIndex)); if (prop.equals(defaultGeometry)) { typeAttNameToRevTypeIndex.put(null, Integer.valueOf(revFeatureIndex)); } } return typeAttNameToRevTypeIndex; } @Override public void validate() throws IllegalAttributeException { for (int i = 0; i < getAttributeCount(); i++) { AttributeDescriptor descriptor = getType().getDescriptor(i); Types.validate(descriptor, getAttribute(i)); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.codec.binary; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import org.apache.commons.codec.BinaryDecoder; import org.apache.commons.codec.BinaryEncoder; import org.apache.commons.codec.CharEncoding; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.EncoderException; /** * Converts hexadecimal Strings. The Charset used for certain operation can be set, the default is set in * {@link #DEFAULT_CHARSET_NAME} * * This class is thread-safe. * * @since 1.1 */ public class Hex implements BinaryEncoder, BinaryDecoder { /** * Default charset is {@link StandardCharsets#UTF_8}. * * @since 1.7 */ public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8; /** * Default charset name is {@link CharEncoding#UTF_8}. * * @since 1.4 */ public static final String DEFAULT_CHARSET_NAME = CharEncoding.UTF_8; /** * Used to build output as hex. */ private static final char[] DIGITS_LOWER = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; /** * Used to build output as hex. 
 */
private static final char[] DIGITS_UPPER = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D',
        'E', 'F' };

/**
 * Converts an array of characters representing hexadecimal values into an array of bytes of those same values. The
 * returned array will be half the length of the passed array, as it takes two characters to represent any given
 * byte. An exception is thrown if the passed char array has an odd number of elements.
 *
 * @param data An array of characters containing hexadecimal digits
 * @return A byte array containing binary data decoded from the supplied char array.
 * @throws DecoderException Thrown if an odd number of characters or illegal characters are supplied
 */
public static byte[] decodeHex(final char[] data) throws DecoderException {
    // Two hex digits decode to one byte, so the output is half the input length;
    // the odd-length check itself is performed by the three-arg overload.
    final byte[] out = new byte[data.length >> 1];
    decodeHex(data, out, 0);
    return out;
}

/**
 * Converts an array of characters representing hexadecimal values into an array of bytes of those same values. The
 * returned array will be half the length of the passed array, as it takes two characters to represent any given
 * byte. An exception is thrown if the passed char array has an odd number of elements.
 *
 * @param data An array of characters containing hexadecimal digits
 * @param out A byte array to contain the binary data decoded from the supplied char array.
 * @param outOffset The position within {@code out} to start writing the decoded bytes.
 * @return the number of bytes written to {@code out}.
* @throws DecoderException Thrown if an odd number of characters or illegal characters are supplied * @since 1.15 */ public static int decodeHex(final char[] data, final byte[] out, final int outOffset) throws DecoderException { final int len = data.length; if ((len & 0x01) != 0) { throw new DecoderException("Odd number of characters."); } final int outLen = len >> 1; if (out.length - outOffset < outLen) { throw new DecoderException("Output array is not large enough to accommodate decoded data."); } // two characters form the hex value. for (int i = outOffset, j = 0; j < len; i++) { int f = toDigit(data[j], j) << 4; j++; f = f | toDigit(data[j], j); j++; out[i] = (byte) (f & 0xFF); } return outLen; } /** * Converts a String representing hexadecimal values into an array of bytes of those same values. The returned array * will be half the length of the passed String, as it takes two characters to represent any given byte. An * exception is thrown if the passed String has an odd number of elements. * * @param data A String containing hexadecimal digits * @return A byte array containing binary data decoded from the supplied char array. * @throws DecoderException Thrown if an odd number of characters or illegal characters are supplied * @since 1.11 */ public static byte[] decodeHex(final String data) throws DecoderException { return decodeHex(data.toCharArray()); } /** * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order. * The returned array will be double the length of the passed array, as it takes two characters to represent any * given byte. * * @param data a byte[] to convert to hex characters * @return A char[] containing lower-case hexadecimal characters */ public static char[] encodeHex(final byte[] data) { return encodeHex(data, true); } /** * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order. 
 * The returned array will be double the length of the passed array, as it takes two characters to represent any
 * given byte.
 *
 * @param data a byte[] to convert to Hex characters
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @return A char[] containing hexadecimal characters in the selected case
 * @since 1.4
 */
public static char[] encodeHex(final byte[] data, final boolean toLowerCase) {
    return encodeHex(data, toLowerCase ? DIGITS_LOWER : DIGITS_UPPER);
}

/**
 * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
 * The returned array will be double the length of the passed array, as it takes two characters to represent any
 * given byte.
 *
 * @param data a byte[] to convert to hex characters
 * @param toDigits the output alphabet (must contain at least 16 chars)
 * @return A char[] containing the characters from the supplied alphabet; for best results, this should be either
 *         upper- or lower-case hex.
 * @since 1.4
 */
protected static char[] encodeHex(final byte[] data, final char[] toDigits) {
    final int dataLength = data.length;
    // Two output characters per input byte.
    final char[] out = new char[dataLength << 1];
    encodeHex(data, 0, dataLength, toDigits, out, 0);
    return out;
}

/**
 * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
 *
 * @param data a byte[] to convert to hex characters
 * @param dataOffset the position in {@code data} to start encoding from
 * @param dataLen the number of bytes from {@code dataOffset} to encode
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @return A char[] of length {@code 2 * dataLen} containing hexadecimal characters in the selected case
 * @since 1.15
 */
public static char[] encodeHex(final byte[] data, final int dataOffset, final int dataLen, final boolean toLowerCase) {
    final char[] out = new char[dataLen << 1];
    encodeHex(data, dataOffset, dataLen, toLowerCase ? DIGITS_LOWER : DIGITS_UPPER, out, 0);
    return out;
}

/**
 * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order,
 * writing the result into a caller-supplied buffer.
 *
 * @param data a byte[] to convert to hex characters
 * @param dataOffset the position in {@code data} to start encoding from
 * @param dataLen the number of bytes from {@code dataOffset} to encode
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @param out a char[] which will hold the resultant appropriate characters from the alphabet.
 * @param outOffset the position within {@code out} at which to start writing the encoded characters.
 * @since 1.15
 */
public static void encodeHex(final byte[] data, final int dataOffset, final int dataLen,
        final boolean toLowerCase, final char[] out, final int outOffset) {
    encodeHex(data, dataOffset, dataLen, toLowerCase ? DIGITS_LOWER : DIGITS_UPPER, out, outOffset);
}

/**
 * Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
 * Core encoding loop used by all of the encodeHex overloads above.
 *
 * @param data a byte[] to convert to hex characters
 * @param dataOffset the position in {@code data} to start encoding from
 * @param dataLen the number of bytes from {@code dataOffset} to encode
 * @param toDigits the output alphabet (must contain at least 16 chars)
 * @param out a char[] which will hold the resultant appropriate characters from the alphabet.
 * @param outOffset the position within {@code out} at which to start writing the encoded characters.
 */
private static void encodeHex(final byte[] data, final int dataOffset, final int dataLen,
        final char[] toDigits, final char[] out, final int outOffset) {
    // two characters form the hex value.
    for (int i = dataOffset, j = outOffset; i < dataOffset + dataLen; i++) {
        // High nibble first, then low nibble.
        out[j++] = toDigits[(0xF0 & data[i]) >>> 4];
        out[j++] = toDigits[0x0F & data[i]];
    }
}

/**
 * Converts a byte buffer into an array of characters representing the hexadecimal values of each byte in order. The
 * returned array will be double the length of the passed array, as it takes two characters to represent any given
 * byte.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param data a byte buffer to convert to hex characters
 * @return A char[] containing lower-case hexadecimal characters
 * @since 1.11
 */
public static char[] encodeHex(final ByteBuffer data) {
    return encodeHex(data, true);
}

/**
 * Converts a byte buffer into an array of characters representing the hexadecimal values of each byte in order. The
 * returned array will be double the length of the passed array, as it takes two characters to represent any given
 * byte.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param data a byte buffer to convert to hex characters
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @return A char[] containing hexadecimal characters in the selected case
 * @since 1.11
 */
public static char[] encodeHex(final ByteBuffer data, final boolean toLowerCase) {
    return encodeHex(data, toLowerCase ? DIGITS_LOWER : DIGITS_UPPER);
}

/**
 * Converts a byte buffer into an array of characters representing the hexadecimal values of each byte in order. The
 * returned array will be double the length of the passed array, as it takes two characters to represent any given
 * byte.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param byteBuffer a byte buffer to convert to hex characters
 * @param toDigits the output alphabet (must be at least 16 characters)
 * @return A char[] containing the appropriate characters from the alphabet For best results, this should be either
 *         upper- or lower-case hex.
 * @since 1.11
 */
protected static char[] encodeHex(final ByteBuffer byteBuffer, final char[] toDigits) {
    return encodeHex(toByteArray(byteBuffer), toDigits);
}

/**
 * Converts an array of bytes into a String representing the hexadecimal values of each byte in order. The returned
 * String will be double the length of the passed array, as it takes two characters to represent any given byte.
 *
 * @param data a byte[] to convert to hex characters
 * @return A String containing lower-case hexadecimal characters
 * @since 1.4
 */
public static String encodeHexString(final byte[] data) {
    return new String(encodeHex(data));
}

/**
 * Converts an array of bytes into a String representing the hexadecimal values of each byte in order. The returned
 * String will be double the length of the passed array, as it takes two characters to represent any given byte.
 *
 * @param data a byte[] to convert to hex characters
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @return A String containing hexadecimal characters in the selected case
 * @since 1.11
 */
public static String encodeHexString(final byte[] data, final boolean toLowerCase) {
    return new String(encodeHex(data, toLowerCase));
}

/**
 * Converts a byte buffer into a String representing the hexadecimal values of each byte in order. The returned
 * String will be double the length of the passed array, as it takes two characters to represent any given byte.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param data a byte buffer to convert to hex characters
 * @return A String containing lower-case hexadecimal characters
 * @since 1.11
 */
public static String encodeHexString(final ByteBuffer data) {
    return new String(encodeHex(data));
}

/**
 * Converts a byte buffer into a String representing the hexadecimal values of each byte in order. The returned
 * String will be double the length of the passed array, as it takes two characters to represent any given byte.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param data a byte buffer to convert to hex characters
 * @param toLowerCase {@code true} converts to lowercase, {@code false} to uppercase
 * @return A String containing hexadecimal characters in the selected case
 * @since 1.11
 */
public static String encodeHexString(final ByteBuffer data, final boolean toLowerCase) {
    return new String(encodeHex(data, toLowerCase));
}

/**
 * Convert the byte buffer to a byte array. All bytes identified by
 * {@link ByteBuffer#remaining()} will be used and the buffer's position is
 * advanced to its limit.
 *
 * <p>Note: when the buffer is array-backed and its remaining bytes span the
 * entire backing array, the backing array itself is returned (no copy), so
 * callers must not modify the returned array in that case.</p>
 *
 * @param byteBuffer the byte buffer
 * @return the byte[]
 */
private static byte[] toByteArray(final ByteBuffer byteBuffer) {
    final int remaining = byteBuffer.remaining();
    // Use the underlying buffer if possible
    if (byteBuffer.hasArray()) {
        final byte[] byteArray = byteBuffer.array();
        if (remaining == byteArray.length) {
            // remaining == capacity implies position == 0, so this consumes the buffer
            byteBuffer.position(remaining);
            return byteArray;
        }
    }
    // Copy the bytes
    final byte[] byteArray = new byte[remaining];
    byteBuffer.get(byteArray);
    return byteArray;
}

/**
 * Converts a hexadecimal character to an integer.
* * @param ch A character to convert to an integer digit * @param index The index of the character in the source * @return An integer * @throws DecoderException Thrown if ch is an illegal hex character */ protected static int toDigit(final char ch, final int index) throws DecoderException { final int digit = Character.digit(ch, 16); if (digit == -1) { throw new DecoderException("Illegal hexadecimal character " + ch + " at index " + index); } return digit; } private final Charset charset; /** * Creates a new codec with the default charset name {@link #DEFAULT_CHARSET} */ public Hex() { // use default encoding this.charset = DEFAULT_CHARSET; } /** * Creates a new codec with the given Charset. * * @param charset the charset. * @since 1.7 */ public Hex(final Charset charset) { this.charset = charset; } /** * Creates a new codec with the given charset name. * * @param charsetName the charset name. * @throws java.nio.charset.UnsupportedCharsetException If the named charset is unavailable * @since 1.4 * @since 1.7 throws UnsupportedCharsetException if the named charset is unavailable */ public Hex(final String charsetName) { this(Charset.forName(charsetName)); } /** * Converts an array of character bytes representing hexadecimal values into an array of bytes of those same values. * The returned array will be half the length of the passed array, as it takes two characters to represent any given * byte. An exception is thrown if the passed char array has an odd number of elements. * * @param array An array of character bytes containing hexadecimal digits * @return A byte array containing binary data decoded from the supplied byte array (representing characters). 
 * @throws DecoderException Thrown if an odd number of characters is supplied to this function
 * @see #decodeHex(char[])
 */
@Override
public byte[] decode(final byte[] array) throws DecoderException {
    // The input bytes are hex *characters* encoded with this codec's charset.
    return decodeHex(new String(array, getCharset()).toCharArray());
}

/**
 * Converts a buffer of character bytes representing hexadecimal values into an array of bytes of those same values.
 * The returned array will be half the length of the passed array, as it takes two characters to represent any given
 * byte. An exception is thrown if the passed char array has an odd number of elements.
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param buffer An array of character bytes containing hexadecimal digits
 * @return A byte array containing binary data decoded from the supplied byte array (representing characters).
 * @throws DecoderException Thrown if an odd number of characters is supplied to this function
 * @see #decodeHex(char[])
 * @since 1.11
 */
public byte[] decode(final ByteBuffer buffer) throws DecoderException {
    return decodeHex(new String(toByteArray(buffer), getCharset()).toCharArray());
}

/**
 * Converts a String or an array of character bytes representing hexadecimal values into an array of bytes of those
 * same values. The returned array will be half the length of the passed String or array, as it takes two characters
 * to represent any given byte. An exception is thrown if the passed char array has an odd number of elements.
 *
 * @param object A String, ByteBuffer, byte[], or an array of character bytes containing hexadecimal digits
 * @return A byte array containing binary data decoded from the supplied byte array (representing characters).
 * @throws DecoderException Thrown if an odd number of characters is supplied to this function or the object is not
 *                          a String or char[]
 * @see #decodeHex(char[])
 */
@Override
public Object decode(final Object object) throws DecoderException {
    if (object instanceof String) {
        // char[] matches no specific overload, so this re-enters decode(Object)
        // and falls through to the decodeHex call below.
        return decode(((String) object).toCharArray());
    }
    if (object instanceof byte[]) {
        return decode((byte[]) object);
    }
    if (object instanceof ByteBuffer) {
        return decode((ByteBuffer) object);
    }
    try {
        return decodeHex((char[]) object);
    } catch (final ClassCastException e) {
        // any other argument type is reported as a decoding error
        throw new DecoderException(e.getMessage(), e);
    }
}

/**
 * Converts an array of bytes into an array of bytes for the characters representing the hexadecimal values of each
 * byte in order. The returned array will be double the length of the passed array, as it takes two characters to
 * represent any given byte.
 * <p>
 * The conversion from hexadecimal characters to the returned bytes is performed with the charset named by
 * {@link #getCharset()}.
 * </p>
 *
 * @param array a byte[] to convert to hex characters
 * @return A byte[] containing the bytes of the lower-case hexadecimal characters
 * @since 1.7 No longer throws IllegalStateException if the charsetName is invalid.
 * @see #encodeHex(byte[])
 */
@Override
public byte[] encode(final byte[] array) {
    return encodeHexString(array).getBytes(this.getCharset());
}

/**
 * Converts byte buffer into an array of bytes for the characters representing the hexadecimal values of each byte
 * in order. The returned array will be double the length of the passed array, as it takes two characters to
 * represent any given byte.
 *
 * <p>The conversion from hexadecimal characters to the returned bytes is performed with the charset named by
 * {@link #getCharset()}.</p>
 *
 * <p>All bytes identified by {@link ByteBuffer#remaining()} will be used; after this method
 * the value {@link ByteBuffer#remaining() remaining()} will be zero.</p>
 *
 * @param array a byte buffer to convert to hex characters
 * @return A byte[] containing the bytes of the lower-case hexadecimal characters
 * @see #encodeHex(byte[])
 * @since 1.11
 */
public byte[] encode(final ByteBuffer array) {
    return encodeHexString(array).getBytes(this.getCharset());
}

/**
 * Converts a String or an array of bytes into an array of characters representing the hexadecimal values of each
 * byte in order. The returned array will be double the length of the passed String or array, as it takes two
 * characters to represent any given byte.
 * <p>
 * The conversion from hexadecimal characters to bytes to be encoded to performed with the charset named by
 * {@link #getCharset()}.
 * </p>
 *
 * @param object a String, ByteBuffer, or byte[] to convert to hex characters
 * @return A char[] containing lower-case hexadecimal characters
 * @throws EncoderException Thrown if the given object is not a String or byte[]
 * @see #encodeHex(byte[])
 */
@Override
public Object encode(final Object object) throws EncoderException {
    final byte[] byteArray;
    if (object instanceof String) {
        byteArray = ((String) object).getBytes(this.getCharset());
    } else if (object instanceof ByteBuffer) {
        byteArray = toByteArray((ByteBuffer) object);
    } else {
        try {
            byteArray = (byte[]) object;
        } catch (final ClassCastException e) {
            // any other argument type is reported as an encoding error
            throw new EncoderException(e.getMessage(), e);
        }
    }
    return encodeHex(byteArray);
}

/**
 * Gets the charset.
 *
 * @return the charset.
 * @since 1.7
 */
public Charset getCharset() {
    return this.charset;
}

/**
 * Gets the charset name.
 *
 * @return the charset name.
* @since 1.4 */ public String getCharsetName() { return this.charset.name(); } /** * Returns a string representation of the object, which includes the charset name. * * @return a string representation of the object. */ @Override public String toString() { return super.toString() + "[charsetName=" + this.charset + "]"; } }
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.zylib.general; import java.awt.Color; import java.awt.event.KeyEvent; import javax.xml.bind.DatatypeConverter; import com.google.common.base.CharMatcher; import com.google.common.base.Preconditions; /** * Helper class that can convert data between different formats. */ public final class Convert { private static String[] HEX_ARRAY = {"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "0A", "0B", "0C", "0D", "0E", "0F", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "1A", "1B", "1C", "1D", "1E", "1F", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "2A", "2B", "2C", "2D", "2E", "2F", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "3A", "3B", "3C", "3D", "3E", "3F", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "4A", "4B", "4C", "4D", "4E", "4F", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "5A", "5B", "5C", "5D", "5E", "5F", "60", "61", "62", "63", "64", "65", "66", "67", "68", "69", "6A", "6B", "6C", "6D", "6E", "6F", "70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "7A", "7B", "7C", "7D", "7E", "7F", "80", "81", "82", "83", "84", "85", "86", "87", "88", "89", "8A", "8B", "8C", "8D", "8E", "8F", "90", "91", "92", "93", "94", "95", "96", "97", "98", "99", "9A", "9B", "9C", "9D", "9E", "9F", "A0", "A1", "A2", "A3", "A4", "A5", "A6", "A7", "A8", "A9", "AA", "AB", "AC", "AD", "AE", 
"AF", "B0", "B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9", "BA", "BB", "BC", "BD", "BE", "BF", "C0", "C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "C9", "CA", "CB", "CC", "CD", "CE", "CF", "D0", "D1", "D2", "D3", "D4", "D5", "D6", "D7", "D8", "D9", "DA", "DB", "DC", "DD", "DE", "DF", "E0", "E1", "E2", "E3", "E4", "E5", "E6", "E7", "E8", "E9", "EA", "EB", "EC", "ED", "EE", "EF", "F0", "F1", "F2", "F3", "F4", "F5", "F6", "F7", "F8", "F9", "FA", "FB", "FC", "FD", "FE", "FF"}; /** * Converts an ASCII string into a hex string. * * Example: AAA => 414141 * * @param asciiString The ASCII string to convert. * * @return The converted hex string. */ public static String asciiToHexString(final String asciiString) { final StringBuffer sb = new StringBuffer(); for (final byte b : asciiString.getBytes()) { sb.append(byteToHexString(b)); } return sb.toString(); } public static String byteToHexString(final byte b) { return HEX_ARRAY[b & 0xFF]; } public static String colorToHexString(final Color c) { final String cs = Integer.toHexString(c.getRGB()); return cs.substring(2); } /** * Converts a decimal string into a hexadecimal string. * * Note that the decimal string value must fit into a long value. * * @param decString The decimal string to convert. * * @return The hexadecimal string. */ public static String decStringToHexString(final String decString) { Preconditions.checkNotNull(decString, "Error: Decimal string can't be null"); return Long.toHexString(Long.valueOf(decString, 16)); } /** * Converts a hex to ASCII. If the hex string has an odd number of characters, a 0 is added at the * end of the string. * * Example: 414141 is converted to AAA * * @param hexString The string to convert. * * @return The converted ASCII string. */ public static String hexStringToAsciiString(final String hexString) { final String realText = (hexString.length() % 2) == 0 ? 
hexString : "0" + hexString; final StringBuilder stringBuilder = new StringBuilder(); for (int i = 0; i < realText.length(); i += 2) { final char c1 = realText.charAt(i); final char c2 = realText.charAt(i + 1); if (!isHexCharacter(c1) || !isHexCharacter(c2)) { throw new IllegalArgumentException("Error: Invalid hex string"); } final char code = (char) ((Character.digit(c1, 16) << 4) + Character.digit(c2, 16)); stringBuilder.append(isPrintableCharacter(code) ? code : "."); } return stringBuilder.toString(); } /** * Converts a hex string to a byte array. If the hex string has an odd number of characters, a 0 * is added at the end of the string. * * Example: 414141 => {0x41, 0x41, 0x41} * * @param hexString The hex string to convert. * * @return The converted byte array. */ public static byte[] hexStringToBytes(final String hexString) { return DatatypeConverter.parseHexBinary(hexString); } /** * Converts an hex string to long. * * @param hexString string to convert. * * @return The hex string. */ public static long hexStringToLong(final String hexString) { Preconditions.checkNotNull(hexString, "Error: Unicode string can't be null"); Preconditions.checkArgument(isHexString(hexString), String.format("Error: Hex string '%s' is not a vaild hex string", hexString)); if ((hexString.length() == 16) && (hexString.charAt(0) >= 8)) { // Reason: Long.parseLong(x, 16) throws NumberFormatException when // x is a negative value (x >= 8000.0000.0000.0000)! NH final String strAddr1 = hexString.substring(0, hexString.length() - 8); final String strAddr2 = hexString.substring(hexString.length() - 8); return (Long.parseLong(strAddr1, 16) << 32) + Long.parseLong(strAddr2, 16); } return Long.parseLong(hexString, 16); } /** * Tests whether a given character is a valid decimal character. * * @param c The character to test. * * @return True, if the given character is a valid decimal character. 
*/ public static boolean isDecCharacter(final char c) { return CharMatcher.inRange('0', '9').apply(c); } /** * Tests whether a given string is a valid decimal string. * * @param string The string to check. * * @return True, if the string is a valid decimal string. False, otherwise. */ public static boolean isDecString(final String string) { Preconditions.checkNotNull(string); final CharMatcher cm = CharMatcher.inRange('0', '9'); for (int i = 0; i < string.length(); i++) { if (!cm.apply(string.charAt(i))) { return false; } } return string.length() != 0; } /** * Tests whether a character is a valid character of a hexadecimal string. * * @param c The character to test. * * @return True, if the character is a hex character. False, otherwise. */ public static boolean isHexCharacter(final char c) { return isDecCharacter(c) || ((c >= 'a') && (c <= 'f')) || ((c >= 'A') && (c <= 'F')); } /** * Tests whether a given string is a valid hexadecimal string. * * @param string The string to check. * * @return True, if the string is a valid hexadecimal string. False, otherwise. */ public static boolean isHexString(final String string) { Preconditions.checkNotNull(string, "Error: String argument can't be null"); final CharMatcher cm = CharMatcher.inRange('0', '9').or(CharMatcher.inRange('a', 'z')) .or(CharMatcher.inRange('A', 'F')); for (int i = 0; i < string.length(); i++) { if (!cm.apply(string.charAt(i))) { return false; } } return string.length() != 0; } /** * Tests whether a given string is a valid MD5 string. * * @param string The string to check. * * @return True, if the string is a valid MD5 string. False, otherwise. */ public static boolean isMD5String(final String string) { Preconditions.checkNotNull(string, "Error: String argument can't be null"); return (string.length() == 32) && isHexString(string); } /** * Tests whether a character is a printable ASCII character. * * @param c The character to test. * * @return True, if the character is a printable ASCII character. 
False, otherwise. */ public static boolean isPrintableCharacter(final char c) { final Character.UnicodeBlock block = Character.UnicodeBlock.of(c); return !Character.isISOControl(c) && (c != KeyEvent.CHAR_UNDEFINED) && (block != null) && (block != Character.UnicodeBlock.SPECIALS); } /** * Tests whether a given string is a valid SHA1 string. * * @param string The string to check. * * @return True, if the string is a valid SHA1 string. False, otherwise. */ public static boolean isSha1String(final String string) { Preconditions.checkNotNull(string, "Error: String argument can't be null"); return (string.length() == 40) && isHexString(string); } /** * Converts an unicode string to a hex string. * * @param unicodeString The unicode string to convert. * * @return The hex string. */ public static String unicodeToHexString(final String unicodeString) { Preconditions.checkNotNull(unicodeString, "Error: Unicode string can't be null"); final StringBuffer sb = new StringBuffer(); for (final byte b : unicodeString.getBytes()) { sb.append(String.format("%X00", b)); } return sb.toString(); } }
/*
Copyright (c) 2013 James Ahlborn

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.healthmarketscience.jackcess.impl;

import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.text.Normalizer;
import java.util.EnumSet;
import java.util.Set;
import java.util.regex.Pattern;

import com.healthmarketscience.jackcess.DataType;
import com.healthmarketscience.jackcess.util.OleBlob;
import static com.healthmarketscience.jackcess.util.OleBlob.*;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * Utility code for working with OLE data.
 *
 * @author James Ahlborn
 * @usage _advanced_class_
 */
public class OleUtil
{
  /**
   * Interface used to allow optional inclusion of the poi library for working
   * with compound ole data.
   */
  interface CompoundPackageFactory
  {
    public ContentImpl createCompoundPackageContent(
        OleBlobImpl blob, String prettyName, String className, String typeName,
        ByteBuffer blobBb, int dataBlockLen);
  }

  // signature (short) expected at the start of every access OLE blob
  // (verified by parseContent below)
  private static final int PACKAGE_SIGNATURE = 0x1C15;
  private static final Charset OLE_CHARSET = StandardCharsets.US_ASCII;
  // wide strings in the package stream footer are UTF-16LE
  private static final Charset OLE_UTF_CHARSET = StandardCharsets.UTF_16LE;
  // magic bytes identifying an OLE compound storage document (D0 CF 11 E0 ...)
  private static final byte[] COMPOUND_STORAGE_SIGNATURE =
    {(byte)0xd0,(byte)0xcf,(byte)0x11,(byte)0xe0,
     (byte)0xa1,(byte)0xb1,(byte)0x1a,(byte)0xe1};
  private static final String SIMPLE_PACKAGE_TYPE = "Package";
  private static final int PACKAGE_OBJECT_TYPE = 0x02;
  private static final int OLE_VERSION = 0x0501;
  private static final int OLE_FORMAT = 0x02;
  private static final int PACKAGE_STREAM_SIGNATURE = 0x02;
  // package stream type markers: embedded file payload vs. link to a file
  private static final int PS_EMBEDDED_FILE = 0x030000;
  private static final int PS_LINKED_FILE = 0x010000;
  // the only content types createBlob() currently knows how to write
  private static final Set<ContentType> WRITEABLE_TYPES = EnumSet.of(
      ContentType.LINK, ContentType.SIMPLE_PACKAGE, ContentType.OTHER);
  private static final byte[] NO_DATA = new byte[0];
  private static final int LINK_HEADER = 0x01;
  // fixed trailer appended to every blob written by createBlob()
  private static final byte[] PACKAGE_FOOTER = {
    0x01, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, (byte)0xAD, 0x05, (byte)0xFE
  };

  // regex pattern which matches all the crazy extra stuff in unicode
  private static final Pattern UNICODE_ACCENT_PATTERN =
    Pattern.compile("[\\p{InCombiningDiacriticalMarks}\\p{IsLm}\\p{IsSk}]+");

  private static final CompoundPackageFactory COMPOUND_FACTORY;

  static {
    // loaded reflectively so that poi stays an optional dependency
    CompoundPackageFactory compoundFactory = null;
    try {
      compoundFactory = (CompoundPackageFactory)
        Class.forName("com.healthmarketscience.jackcess.impl.CompoundOleUtil")
        .newInstance();
    } catch(Throwable t) {
      // must not have poi, will load compound ole data as "other"
    }
    COMPOUND_FACTORY = compoundFactory;
  }

  /**
   * Parses an access database blob structure and returns an appropriate
   * OleBlob instance.
   */
  public static OleBlob parseBlob(byte[] bytes) {
    return new OleBlobImpl(bytes);
  }

  /**
   * Creates a new OleBlob instance using the given information.
   * Serializes the builder's content into the access OLE blob layout:
   * package header, optional package stream header, the content itself,
   * optional package stream footer, and the fixed PACKAGE_FOOTER trailer.
   * The builder's stream (if any) is always closed, even on failure.
   */
  public static OleBlob createBlob(Builder oleBuilder)
    throws IOException
  {
    try {

      if(!WRITEABLE_TYPES.contains(oleBuilder.getType())) {
        throw new IllegalArgumentException(
            "Cannot currently create ole values of type " +
            oleBuilder.getType());
      }

      long contentLen = oleBuilder.getContentLength();
      byte[] contentBytes = oleBuilder.getBytes();
      InputStream contentStream = oleBuilder.getStream();
      byte[] packageStreamHeader = NO_DATA;
      byte[] packageStreamFooter = NO_DATA;

      switch(oleBuilder.getType()) {
      case LINK:
        packageStreamHeader = writePackageStreamHeader(oleBuilder);

        // link "content" is file path
        contentBytes = getZeroTermStrBytes(oleBuilder.getFilePath());
        contentLen = contentBytes.length;
        break;

      case SIMPLE_PACKAGE:
        packageStreamHeader = writePackageStreamHeader(oleBuilder);
        packageStreamFooter = writePackageStreamFooter(oleBuilder);
        break;

      case OTHER:
        // nothing more to do
        break;
      default:
        throw new RuntimeException("unexpected type " + oleBuilder.getType());
      }

      long payloadLen = packageStreamHeader.length + packageStreamFooter.length +
        contentLen;
      byte[] packageHeader = writePackageHeader(oleBuilder, payloadLen);

      long totalOleLen = packageHeader.length + PACKAGE_FOOTER.length +
        payloadLen;
      if(totalOleLen > DataType.OLE.getMaxSize()) {
        throw new IllegalArgumentException("Content size of " + totalOleLen +
                                           " is too large for ole column");
      }

      byte[] oleBytes = new byte[(int)totalOleLen];
      ByteBuffer bb = PageChannel.wrap(oleBytes);
      bb.put(packageHeader);
      bb.put(packageStreamHeader);

      if(contentLen > 0L) {
        if(contentBytes != null) {
          bb.put(contentBytes);
        } else {
          // no byte[] supplied: copy the builder's stream in 8K chunks
          byte[] buf = new byte[8192];
          int numBytes = 0;
          while((numBytes = contentStream.read(buf)) >= 0) {
            bb.put(buf, 0, numBytes);
          }
        }
      }

      bb.put(packageStreamFooter);
      bb.put(PACKAGE_FOOTER);

      return parseBlob(oleBytes);

    } finally {
      ByteUtil.closeQuietly(oleBuilder.getStream());
    }
  }

  /**
   * Serializes the outer package header followed by the ole header.
   * Layout (offsets/lengths mirror what parseContent() reads back):
   * package part: signature, header length, object type, name lengths,
   * name offsets, -1, pretty name bytes, class name bytes; ole part:
   * version, format, type name length + bytes, 8 unused bytes, content length.
   */
  private static byte[] writePackageHeader(Builder oleBuilder, long contentLen) {

    byte[] prettyNameBytes = getZeroTermStrBytes(oleBuilder.getPrettyName());
    String className = oleBuilder.getClassName();
    String typeName = oleBuilder.getTypeName();
    if(className == null) {
      // fall back to whichever of the two names is present
      className = typeName;
    } else if(typeName == null) {
      typeName = className;
    }
    byte[] classNameBytes = getZeroTermStrBytes(className);
    byte[] typeNameBytes = getZeroTermStrBytes(typeName);

    // 20 = fixed package header fields, 24 = fixed ole header fields
    int packageHeaderLen = 20 + prettyNameBytes.length + classNameBytes.length;

    int oleHeaderLen = 24 + typeNameBytes.length;

    byte[] headerBytes = new byte[packageHeaderLen + oleHeaderLen];

    ByteBuffer bb = PageChannel.wrap(headerBytes);

    // write outer package header
    bb.putShort((short)PACKAGE_SIGNATURE);
    bb.putShort((short)packageHeaderLen);
    bb.putInt(PACKAGE_OBJECT_TYPE);
    bb.putShort((short)prettyNameBytes.length);
    bb.putShort((short)classNameBytes.length);
    // names start 8 bytes further on (two offset shorts + the -1 int below)
    int prettyNameOff = bb.position() + 8;
    bb.putShort((short)prettyNameOff);
    bb.putShort((short)(prettyNameOff + prettyNameBytes.length));
    bb.putInt(-1);
    bb.put(prettyNameBytes);
    bb.put(classNameBytes);

    // put ole header
    bb.putInt(OLE_VERSION);
    bb.putInt(OLE_FORMAT);
    bb.putInt(typeNameBytes.length);
    bb.put(typeNameBytes);
    bb.putLong(0L);
    bb.putInt((int)contentLen);

    return headerBytes;
  }

  /**
   * Serializes the package stream header: signature, file name, file path,
   * then either the embedded-file fields (type, path length, path, content
   * length) or the linked-file fields (type, link header).
   */
  private static byte[] writePackageStreamHeader(Builder oleBuilder) {

    byte[] fileNameBytes = getZeroTermStrBytes(oleBuilder.getFileName());
    byte[] filePathBytes = getZeroTermStrBytes(oleBuilder.getFilePath());

    // 6 = signature short + trailing type int shared by both variants
    int headerLen = 6 + fileNameBytes.length + filePathBytes.length;

    if(oleBuilder.getType() == ContentType.SIMPLE_PACKAGE) {

      // extra int path length + repeated path + int content length
      headerLen += 8 + filePathBytes.length;

    } else {

      // extra link header short
      headerLen += 2;
    }

    byte[] headerBytes = new byte[headerLen];
    ByteBuffer bb = PageChannel.wrap(headerBytes);
    bb.putShort((short)PACKAGE_STREAM_SIGNATURE);
    bb.put(fileNameBytes);
    bb.put(filePathBytes);

    if(oleBuilder.getType() == ContentType.SIMPLE_PACKAGE) {
      bb.putInt(PS_EMBEDDED_FILE);
bb.putInt(filePathBytes.length); bb.put(filePathBytes, 0, filePathBytes.length); bb.putInt((int) oleBuilder.getContentLength()); } else { bb.putInt(PS_LINKED_FILE); bb.putShort((short)LINK_HEADER); } return headerBytes; } private static byte[] writePackageStreamFooter(Builder oleBuilder) { // note, these are _not_ zero terminated byte[] fileNameBytes = oleBuilder.getFileName().getBytes(OLE_UTF_CHARSET); byte[] filePathBytes = oleBuilder.getFilePath().getBytes(OLE_UTF_CHARSET); int footerLen = 12 + (filePathBytes.length * 2) + fileNameBytes.length; byte[] footerBytes = new byte[footerLen]; ByteBuffer bb = PageChannel.wrap(footerBytes); bb.putInt(filePathBytes.length/2); bb.put(filePathBytes); bb.putInt(fileNameBytes.length/2); bb.put(fileNameBytes); bb.putInt(filePathBytes.length/2); bb.put(filePathBytes); return footerBytes; } /** * creates the appropriate ContentImpl for the given blob. */ private static ContentImpl parseContent(OleBlobImpl blob) throws IOException { ByteBuffer bb = PageChannel.wrap(blob.getBytes()); if((bb.remaining() < 2) || (bb.getShort() != PACKAGE_SIGNATURE)) { return new UnknownContentImpl(blob); } // read outer package header int headerSize = bb.getShort(); /* int objType = */ bb.getInt(); int prettyNameLen = bb.getShort(); int classNameLen = bb.getShort(); int prettyNameOff = bb.getShort(); int classNameOff = bb.getShort(); /* int objSize = */ bb.getInt(); String prettyName = readStr(bb, prettyNameOff, prettyNameLen); String className = readStr(bb, classNameOff, classNameLen); bb.position(headerSize); // read ole header int oleVer = bb.getInt(); /* int format = */ bb.getInt(); if(oleVer != OLE_VERSION) { return new UnknownContentImpl(blob); } int typeNameLen = bb.getInt(); String typeName = readStr(bb, bb.position(), typeNameLen); bb.getLong(); // unused int dataBlockLen = bb.getInt(); int dataBlockPos = bb.position(); if(SIMPLE_PACKAGE_TYPE.equalsIgnoreCase(typeName)) { return createSimplePackageContent( blob, prettyName, className, 
typeName, bb, dataBlockLen); } // if COMPOUND_FACTORY is null, the poi library isn't available, so just // load compound data as "other" if((COMPOUND_FACTORY != null) && (bb.remaining() >= COMPOUND_STORAGE_SIGNATURE.length) && ByteUtil.matchesRange(bb, bb.position(), COMPOUND_STORAGE_SIGNATURE)) { return COMPOUND_FACTORY.createCompoundPackageContent( blob, prettyName, className, typeName, bb, dataBlockLen); } // this is either some other "special" (as yet unhandled) format, or it is // simply an embedded file (or it is compound data and poi isn't available) return new OtherContentImpl(blob, prettyName, className, typeName, dataBlockPos, dataBlockLen); } private static ContentImpl createSimplePackageContent( OleBlobImpl blob, String prettyName, String className, String typeName, ByteBuffer blobBb, int dataBlockLen) { int dataBlockPos = blobBb.position(); ByteBuffer bb = PageChannel.narrowBuffer(blobBb, dataBlockPos, dataBlockPos + dataBlockLen); int packageSig = bb.getShort(); if(packageSig != PACKAGE_STREAM_SIGNATURE) { return new OtherContentImpl(blob, prettyName, className, typeName, dataBlockPos, dataBlockLen); } String fileName = readZeroTermStr(bb); String filePath = readZeroTermStr(bb); int packageType = bb.getInt(); if(packageType == PS_EMBEDDED_FILE) { int localFilePathLen = bb.getInt(); String localFilePath = readStr(bb, bb.position(), localFilePathLen); int dataLen = bb.getInt(); int dataPos = bb.position(); bb.position(dataLen + dataPos); // remaining strings are in "reverse" order (local file path, file name, // file path). 
these string usee a real utf charset, and therefore can // "fix" problems with ascii based names (so we prefer these strings to // the original strings we found) int strNum = 0; while(true) { int rem = bb.remaining(); if(rem < 4) { break; } int strLen = bb.getInt(); String remStr = readStr(bb, bb.position(), strLen * 2, OLE_UTF_CHARSET); switch(strNum) { case 0: localFilePath = remStr; break; case 1: fileName = remStr; break; case 2: filePath = remStr; break; default: // ignore } ++strNum; } return new SimplePackageContentImpl( blob, prettyName, className, typeName, dataPos, dataLen, fileName, filePath, localFilePath); } if(packageType == PS_LINKED_FILE) { bb.getShort(); //unknown String linkStr = readZeroTermStr(bb); return new LinkContentImpl(blob, prettyName, className, typeName, fileName, linkStr, filePath); } return new OtherContentImpl(blob, prettyName, className, typeName, dataBlockPos, dataBlockLen); } private static String readStr(ByteBuffer bb, int off, int len) { return readStr(bb, off, len, OLE_CHARSET); } private static String readZeroTermStr(ByteBuffer bb) { int off = bb.position(); while(bb.hasRemaining()) { byte b = bb.get(); if(b == 0) { break; } } int len = bb.position() - off; return readStr(bb, off, len); } private static String readStr(ByteBuffer bb, int off, int len, Charset charset) { String str = new String(bb.array(), off, len, charset); bb.position(off + len); if(str.charAt(str.length() - 1) == '\0') { str = str.substring(0, str.length() - 1); } return str; } private static byte[] getZeroTermStrBytes(String str) { // since we are converting to ascii, try to make "nicer" versions of crazy // chars (e.g. convert "u with an umlaut" to just "u"). this may not // ultimately help anything but it is what ms access does. 
// decompose complex chars into combos of char and accent str = Normalizer.normalize(str, Normalizer.Form.NFD); // strip the accents str = UNICODE_ACCENT_PATTERN.matcher(str).replaceAll(""); // (re)normalize what is left str = Normalizer.normalize(str, Normalizer.Form.NFC); return (str + '\0').getBytes(OLE_CHARSET); } static final class OleBlobImpl implements OleBlob, ColumnImpl.InMemoryBlob { private byte[] _bytes; private ContentImpl _content; private OleBlobImpl(byte[] bytes) { _bytes = bytes; } @Override public void writeTo(OutputStream out) throws IOException { out.write(_bytes); } @Override public Content getContent() throws IOException { if(_content == null) { _content = parseContent(this); } return _content; } @Override public InputStream getBinaryStream() throws SQLException { return new ByteArrayInputStream(_bytes); } @Override public InputStream getBinaryStream(long pos, long len) throws SQLException { return new ByteArrayInputStream(_bytes, fromJdbcOffset(pos), (int)len); } @Override public long length() throws SQLException { return _bytes.length; } @Override public byte[] getBytes() throws IOException { if(_bytes == null) { throw new IOException("blob is closed"); } return _bytes; } @Override public byte[] getBytes(long pos, int len) throws SQLException { return ByteUtil.copyOf(_bytes, fromJdbcOffset(pos), len); } @Override public long position(byte[] pattern, long start) throws SQLException { int pos = ByteUtil.findRange(PageChannel.wrap(_bytes), fromJdbcOffset(start), pattern); return((pos >= 0) ? 
toJdbcOffset(pos) : pos); } @Override public long position(Blob pattern, long start) throws SQLException { return position(pattern.getBytes(1L, (int)pattern.length()), start); } @Override public OutputStream setBinaryStream(long position) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void truncate(long len) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int setBytes(long pos, byte[] bytes) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int setBytes(long pos, byte[] bytes, int offset, int lesn) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void free() { close(); } @Override public void close() { _bytes = null; ByteUtil.closeQuietly(_content); _content = null; } private static int toJdbcOffset(int off) { return off + 1; } private static int fromJdbcOffset(long off) { return (int)off - 1; } @Override public String toString() { ToStringBuilder sb = CustomToStringStyle.builder(this); if(_content != null) { sb.append("content", _content); } else { sb.append("bytes", _bytes); sb.append("content", "(uninitialized)"); } return sb.toString(); } } static abstract class ContentImpl implements Content, Closeable { protected final OleBlobImpl _blob; protected ContentImpl(OleBlobImpl blob) { _blob = blob; } @Override public OleBlobImpl getBlob() { return _blob; } protected byte[] getBytes() throws IOException { return getBlob().getBytes(); } @Override public void close() { // base does nothing } protected ToStringBuilder toString(ToStringBuilder sb) { sb.append("type", getType()); return sb; } } static abstract class EmbeddedContentImpl extends ContentImpl implements EmbeddedContent { private final int _position; private final int _length; protected EmbeddedContentImpl(OleBlobImpl blob, int position, int length) { super(blob); _position = position; _length = length; } @Override public long length() { return _length; } 
@Override public InputStream getStream() throws IOException { return new ByteArrayInputStream(getBytes(), _position, _length); } @Override public void writeTo(OutputStream out) throws IOException { out.write(getBytes(), _position, _length); } @Override protected ToStringBuilder toString(ToStringBuilder sb) { super.toString(sb); if(_position >= 0) { sb.append("content", ByteBuffer.wrap(_blob._bytes, _position, _length)); } return sb; } } static abstract class EmbeddedPackageContentImpl extends EmbeddedContentImpl implements PackageContent { private final String _prettyName; private final String _className; private final String _typeName; protected EmbeddedPackageContentImpl( OleBlobImpl blob, String prettyName, String className, String typeName, int position, int length) { super(blob, position, length); _prettyName = prettyName; _className = className; _typeName = typeName; } @Override public String getPrettyName() { return _prettyName; } @Override public String getClassName() { return _className; } @Override public String getTypeName() { return _typeName; } @Override protected ToStringBuilder toString(ToStringBuilder sb) { sb.append("prettyName", _prettyName) .append("className", _className) .append("typeName", _typeName); super.toString(sb); return sb; } } private static final class LinkContentImpl extends EmbeddedPackageContentImpl implements LinkContent { private final String _fileName; private final String _linkPath; private final String _filePath; private LinkContentImpl(OleBlobImpl blob, String prettyName, String className, String typeName, String fileName, String linkPath, String filePath) { super(blob, prettyName, className, typeName, -1, -1); _fileName = fileName; _linkPath = linkPath; _filePath = filePath; } @Override public ContentType getType() { return ContentType.LINK; } @Override public String getFileName() { return _fileName; } @Override public String getLinkPath() { return _linkPath; } @Override public String getFilePath() { return _filePath; } 
@Override public InputStream getLinkStream() throws IOException { return new FileInputStream(getLinkPath()); } @Override public String toString() { return toString(CustomToStringStyle.builder(this)) .append("fileName", _fileName) .append("linkPath", _linkPath) .append("filePath", _filePath) .toString(); } } private static final class SimplePackageContentImpl extends EmbeddedPackageContentImpl implements SimplePackageContent { private final String _fileName; private final String _filePath; private final String _localFilePath; private SimplePackageContentImpl(OleBlobImpl blob, String prettyName, String className, String typeName, int position, int length, String fileName, String filePath, String localFilePath) { super(blob, prettyName, className, typeName, position, length); _fileName = fileName; _filePath = filePath; _localFilePath = localFilePath; } @Override public ContentType getType() { return ContentType.SIMPLE_PACKAGE; } @Override public String getFileName() { return _fileName; } @Override public String getFilePath() { return _filePath; } @Override public String getLocalFilePath() { return _localFilePath; } @Override public String toString() { return toString(CustomToStringStyle.builder(this)) .append("fileName", _fileName) .append("filePath", _filePath) .append("localFilePath", _localFilePath) .toString(); } } private static final class OtherContentImpl extends EmbeddedPackageContentImpl implements OtherContent { private OtherContentImpl( OleBlobImpl blob, String prettyName, String className, String typeName, int position, int length) { super(blob, prettyName, className, typeName, position, length); } @Override public ContentType getType() { return ContentType.OTHER; } @Override public String toString() { return toString(CustomToStringStyle.builder(this)) .toString(); } } private static final class UnknownContentImpl extends ContentImpl { private UnknownContentImpl(OleBlobImpl blob) { super(blob); } @Override public ContentType getType() { return 
ContentType.UNKNOWN; } @Override public String toString() { return toString(CustomToStringStyle.builder(this)) .append("content", _blob._bytes) .toString(); } } }
package org.basex.gui.layout;

import static org.basex.gui.GUIConstants.*;
import static org.basex.gui.layout.BaseXKeys.*;

import java.awt.*;
import java.awt.event.*;

/**
 * DoubleSlider implementation: a slider with two independent knobs that
 * select a (min, max) sub-range of a fixed value range.  Larger ranges are
 * mapped logarithmically.
 *
 * @author BaseX Team 2005-20, BSD License
 * @author Christian Gruen
 */
public final class BaseXDSlider extends BaseXPanel {
  /** Label space (unscaled). */
  public static final int LABELW = 300;
  /** Arrow (slider knob) width. */
  private static final int ARROW = 17;

  /** Minimum slider value. */
  public final double min;
  /** Maximum slider value. */
  public final double max;
  /** Current slider value. */
  public double currMin;
  /** Current slider value. */
  public double currMax;
  /** Integer flag (snap values to whole numbers). */
  public boolean itr;

  /** Listener. */
  private final ActionListener listener;
  /** Cached slider value. */
  private double oldMin;
  /** Cached slider value. */
  private double oldMax;
  /** Mouse position for dragging operations. */
  private int mouX;
  /** Left button flag. */
  private boolean left;
  /** Right button flag. */
  private boolean right;
  /** Center flag (dragging the range between the knobs). */
  private boolean center;
  /** Logarithmic scale. */
  private final boolean log;

  /**
   * Constructor.
   * @param win reference to the main window
   * @param min min value
   * @param max max value
   * @param listener listener
   */
  public BaseXDSlider(final BaseXWindow win, final double min, final double max,
      final ActionListener listener) {
    super(win);
    this.listener = listener;
    this.min = min;
    this.max = max;
    currMin = min;
    currMax = max;
    // choose logarithmic scaling for larger ranges
    log = StrictMath.log(max) - StrictMath.log(min) > 5 && max - min > 100;

    setOpaque(false);
    setFocusable(true);

    BaseXLayout.setWidth(this, 200 + LABELW);
    setPreferredSize(new Dimension(getPreferredSize().width,
        getFont().getSize() + 9));

    addFocusListener(new FocusListener() {
      @Override
      public void focusGained(final FocusEvent e) {
        repaint();
      }
      @Override
      public void focusLost(final FocusEvent e) {
        repaint();
      }
    });

    addKeyListener(this);
    addMouseListener(this);
    addMouseMotionListener(this);
    setToolTip();
  }

  @Override
  public void mouseMoved(final MouseEvent e) {
    mouX = e.getX();
    final Range r = new Range(this);
    // determine which part of the slider the mouse hovers over
    left = mouX >= r.xs && mouX <= r.xs + ARROW;
    right = mouX >= r.xe && mouX <= r.xe + ARROW;
    center = mouX + ARROW > r.xs && mouX < r.xe;
    // cache encoded values as the drag origin
    oldMin = encode(currMin);
    oldMax = encode(currMax);
  }

  @Override
  public void mousePressed(final MouseEvent e) {
    mouseMoved(e);
  }

  @Override
  public void mouseDragged(final MouseEvent e) {
    if(!left && !right && !center) return;

    final Range r = new Range(this);
    // proportional (encoded) distance covered by the drag
    final double prop = r.dist * (mouX - e.getX()) / r.w;

    if(left) {
      currMin = limit(min, currMax, decode(oldMin - prop) - 1);
    } else if(right) {
      currMax = limit(currMin, max, decode(oldMax - prop) - 1);
    } else {
      // drag the whole range
      currMin = limit(min, max, decode(oldMin - prop) - 1);
      currMax = limit(min, max, decode(oldMax - prop) - 1);
    }
    if(itr) {
      currMin = (long) currMin;
      currMax = (long) currMax;
    }
    listener.actionPerformed(null);
    setToolTip();
    repaint();
  }

  /**
   * Sets a new tooltip.
   */
  private void setToolTip() {
    // truncate to two decimal places for display
    final double mn = (long) (currMin * 100) / 100.0;
    final double mx = (long) (currMax * 100) / 100.0;
    setToolTipText(BaseXLayout.value(mn) + " - " + BaseXLayout.value(mx));
  }

  @Override
  public void mouseReleased(final MouseEvent e) {
    left = false;
    right = false;
    center = false;
  }

  @Override
  public void keyPressed(final KeyEvent e) {
    // cache current values to detect an actual change at the end
    oldMin = currMin;
    // BUG FIX: was "oldMax = currMin", which made the change check and the
    // integer snapping below compare against the wrong value
    oldMax = currMax;
    double diffMin = 0;
    double diffMax = 0;
    if(PREVCHAR.is(e)) {
      diffMin = -1;
      diffMax = -1;
    } else if(NEXTCHAR.is(e)) {
      diffMin = 1;
      diffMax = 1;
    } else if(PREVLINE.is(e)) {
      // widen the range
      diffMin = -1;
      diffMax = 1;
    } else if(NEXTLINE.is(e)) {
      // narrow the range
      diffMin = 1;
      diffMax = -1;
    } else if(LINESTART.is(e)) {
      currMin = min;
    } else if(LINEEND.is(e)) {
      currMax = max;
    }

    if(e.isShiftDown()) {
      // fine-grained steps
      diffMin /= 10;
      diffMax /= 10;
    }

    // scale the step to 1/20 of the encoded range
    final double dist = encode(max) - encode(min);
    diffMin = dist / 20 * diffMin;
    diffMax = dist / 20 * diffMax;

    if(diffMin != 0) {
      currMin = limit(min, currMax, decode(Math.max(0,
          encode(currMin) + diffMin)));
    }
    if(diffMax != 0) {
      currMax = limit(currMin, max, decode(Math.max(0,
          encode(currMax) + diffMax)));
    }
    if(currMin != oldMin || currMax != oldMax) {
      if(itr) {
        // snap to integers, always moving at least one step
        if(currMin != oldMin) currMin = currMin > oldMin ?
          Math.max(oldMin + 1, (long) currMin) :
          Math.min(oldMin - 1, (long) currMin);
        if(currMax != oldMax) currMax = currMax > oldMax ?
          Math.max(oldMax + 1, (long) currMax) :
          Math.min(oldMax - 1, (long) currMax);
      }
      listener.actionPerformed(null);
      repaint();
    }
  }

  @Override
  public void paintComponent(final Graphics g) {
    super.paintComponent(g);

    final int w = getWidth() - LABELW;
    final int h = getHeight();
    final int hc = h / 2;
    final int s = 4;
    final boolean focus = hasFocus();

    // draw slider track
    g.setColor(BACK);
    g.fillRect(0, hc - s, w, s << 1);
    g.setColor(TEXT);
    g.drawLine(0, hc - s, w - 1, hc - s);
    g.drawLine(0, hc - s, 0, hc + s);
    g.setColor(color2);
    g.drawLine(w - 1, hc - s, w - 1, hc + s);
    g.drawLine(0, hc + s, w, hc + s);

    final Range r = new Range(this);
    BaseXLayout.drawCell(g, r.xs, r.xe + ARROW, 2, h - 2, false);

    if(r.xs + ARROW < r.xe) {
      g.setColor(color4);
      g.drawLine(r.xs + ARROW, 3, r.xs + ARROW, h - 4);
      g.drawLine(r.xe - 1, 3, r.xe - 1, h - 4);
      g.setColor(BACK);
      if(r.xs + ARROW + 2 < r.xe) {
        g.drawLine(r.xs + ARROW + 1, 4, r.xs + ARROW + 1, h - 5);
        g.drawLine(r.xe, 4, r.xe, h - 5);
      }
      g.drawLine(r.xs + ARROW - 1, 4, r.xs + ARROW - 1, h - 5);
      g.drawLine(r.xe - 2, 4, r.xe - 2, h - 5);
    }

    // draw arrows
    final Polygon pol = new Polygon(
      new int[] { r.xs + 11, r.xs + 5, r.xs + 5, r.xs + 11 },
      new int[] { hc - 5, hc - 1, hc, hc + 5 }, 4);
    g.setColor(focus ? color4 : gray);
    g.fillPolygon(pol);
    pol.xpoints = new int[] { r.xe + 5, r.xe + 12, r.xe + 12, r.xe + 5 };
    g.fillPolygon(pol);

    g.setColor(focus ? TEXT : dgray);
    g.drawLine(r.xs + 11, hc - 5, r.xs + 11, hc + 4);
    g.drawLine(r.xs + 11, hc - 5, r.xs + 6, hc - 1);
    g.drawLine(r.xe + 5, hc - 5, r.xe + 5, hc + 4);
    g.drawLine(r.xe + 5, hc - 5, r.xe + 11, hc - 1);
    g.setColor(BACK);
    g.drawLine(r.xs + 10, hc + 4, r.xs + 6, hc + 1);
    g.drawLine(r.xe + 6, hc + 4, r.xe + 11, hc + 1);

    // draw range info
    g.setColor(TEXT);
    final double mn = (long) (currMin * 100) / 100.0;
    final double mx = (long) (currMax * 100) / 100.0;

    g.drawString(BaseXLayout.value(mn) + " - " + BaseXLayout.value(mx),
        w + 15, h - (h - getFont().getSize()) / 2);
  }

  /**
   * Encodes the specified value.
   * @param v value to be normalized
   * @return new value
   */
  private double encode(final double v) {
    return log ? StrictMath.log(v + 1) : v;
  }

  /**
   * Decodes the specified value.
   * @param v value to be normalized
   * @return new value
   */
  private double decode(final double v) {
    return log ? StrictMath.exp(v) - 1 : v;
  }

  /**
   * Returns a double in the specified minimum and maximum range.
   * @param mn minimum value
   * @param mx maximum value
   * @param val value
   * @return new value
   */
  private static double limit(final double mn, final double mx, final double val) {
    return Math.max(mn, Math.min(mx, val));
  }

  /** Range class. */
  private static class Range {
    /** Range distance. */
    final double dist;
    /** Start position. */
    final int xs;
    /** End position. */
    final int xe;
    /** Slider width. */
    final int w;

    /**
     * Constructor.
     * @param s slider reference
     */
    Range(final BaseXDSlider s) {
      // usable track width, excluding labels and both knobs
      w = s.getWidth() - LABELW - (ARROW << 1);
      dist = s.encode(s.max - s.min);
      xs = (int) (s.encode(s.currMin - s.min) * w / dist);
      xe = (s.min == s.max ? w :
        (int) (s.encode(s.currMax - s.min) * w / dist)) + ARROW;
    }
  }
}
package org.ieeeguc.ieeeguc.models;

import org.ieeeguc.ieeeguc.HTTPResponse;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.util.Date;
import java.util.HashMap;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

/**
 * Class to be used as a container for the user.  Also provides static/instance
 * helpers that call the backend REST API asynchronously; results are reported
 * through the supplied {@link HTTPResponse} callback (onSuccess for 2xx,
 * onFailure otherwise; status -1 means no connection, 500 means the response
 * body could not be parsed as JSON).
 */
public class User {

    /** JSON content type used for all request bodies. */
    public static final MediaType CONTENT_TYPE =
            MediaType.parse("application/json; charset=utf-8");

    /** Kind of user account. */
    public static enum Type {
        ADMIN, HIGH_BOARD, MEMBER, UPPER_BOARD
    }

    /** User gender. */
    public static enum Gender {
        MALE, FEMALE
    }

    private int id;
    private Type type;
    private String firstName;
    private String lastName;
    private String email;
    private Gender gender;
    private Date birthdate;
    private String ieeeMembershipID;
    private int committeeID;
    private String committeeName;
    private String phoneNumber;
    private JSONObject settings;

    /**
     * Creates a user container holding the given profile data.
     */
    public User(int id, Type type, String firstName, String lastName,
                Gender gender, String email, Date birthdate,
                String ieeeMembershipID, int committeeID, String committeeName,
                String phoneNumber, JSONObject settings) {
        this.type = type;
        this.firstName = firstName;
        this.lastName = lastName;
        this.gender = gender;
        this.email = email;
        this.birthdate = birthdate;
        this.ieeeMembershipID = ieeeMembershipID;
        this.committeeID = committeeID;
        this.committeeName = committeeName;
        this.id = id;
        this.phoneNumber = phoneNumber;
        this.settings = settings;
    }

    public int getId() {
        return id;
    }

    public String getPhoneNumber() {
        return phoneNumber;
    }

    public JSONObject getSettings() {
        return settings;
    }

    public Type getType() {
        return type;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public String getEmail() {
        return email;
    }

    public Date getBirthdate() {
        return birthdate;
    }

    public Gender getGender() {
        return gender;
    }

    public String getIeeeMembershipID() {
        return ieeeMembershipID;
    }

    public int getCommitteeID() {
        return committeeID;
    }

    public String getCommitteeName() {
        return committeeName;
    }

    /**
     * Called when the user forgot the password; asks the server to send a
     * reset email to the given address.
     *
     * @param email         email of the user
     * @param HTTP_RESPONSE HTTPResponse interface instance
     */
    public static void forgetPassword(String email, final HTTPResponse HTTP_RESPONSE) {
        // request body: {"email": "..."}  (was a raw HashMap before)
        HashMap<String, String> body = new HashMap<>();
        body.put("email", email);

        OkHttpClient client = new OkHttpClient();
        final Request request = new Request.Builder()
                .url("http://ieeeguc.org/api/forgotPassword")
                .post(RequestBody.create(CONTENT_TYPE, (new JSONObject(body)).toString()))
                .build();

        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                // no connection
                HTTP_RESPONSE.onFailure(-1, null);
                call.cancel();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                try {
                    String body = response.body().string();
                    HTTP_RESPONSE.onSuccess(200, new JSONObject(body));
                } catch (JSONException e) {
                    // body was not valid JSON
                    HTTP_RESPONSE.onFailure(500, null);
                }
                response.close();
            }
        });
    }

    /**
     * Logs the user in.
     *
     * @param email         email of the user
     * @param password      password of the user
     * @param HTTP_RESPONSE callback receiving the server response (contains
     *                      the user info from the database on success)
     */
    public static void login(String email, String password, final HTTPResponse HTTP_RESPONSE) {
        OkHttpClient client = new OkHttpClient();
        JSONObject jsonBody = new JSONObject();
        try {
            jsonBody.put("email", email);
            jsonBody.put("password", password);
            RequestBody body = RequestBody.create(CONTENT_TYPE, jsonBody.toString());
            Request request = new Request.Builder()
                    .url("http://ieeeguc.org/api/login")
                    .header("user_agent", "Android")
                    .post(body)
                    .build();
            client.newCall(request).enqueue(new Callback() {
                @Override
                public void onFailure(Call call, IOException e) {
                    HTTP_RESPONSE.onFailure(-1, null);
                    call.cancel();
                }

                @Override
                public void onResponse(Call call, Response response) throws IOException {
                    try {
                        String responseData = response.body().string();
                        JSONObject json = new JSONObject(responseData);
                        int x = response.code();
                        String y = Integer.toString(x);
                        if (y.charAt(0) == '2') {
                            HTTP_RESPONSE.onSuccess(x, json);
                        } else {
                            HTTP_RESPONSE.onFailure(x, json);
                        }
                    } catch (JSONException e) {
                        HTTP_RESPONSE.onFailure(500, null);
                    }
                    response.close();
                }
            });
        } catch (JSONException e) {
            HTTP_RESPONSE.onFailure(-1, null);
        }
    }

    /**
     * Fetches information about another user; the returned body differs
     * according to the type of the requested user.
     *
     * @param token         token of the user
     * @param id            id of the user
     * @param HTTP_RESPONSE HTTPResponse interface instance
     */
    public static void getUser(String token, int id, final HTTPResponse HTTP_RESPONSE) {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url("http://ieeeguc.org/api/User/" + id)
                .addHeader("Authorization", token)
                .addHeader("user_agent", "Android")
                .build();
        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                HTTP_RESPONSE.onFailure(-1, null);
                call.cancel();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                int code = response.code();
                String c = code + "";
                String body = response.body().string();
                try {
                    JSONObject rr = new JSONObject(body);
                    if (c.charAt(0) == '2') {
                        HTTP_RESPONSE.onSuccess(code, rr);
                    } else {
                        HTTP_RESPONSE.onFailure(code, rr);
                    }
                } catch (JSONException e) {
                    HTTP_RESPONSE.onFailure(code, null);
                }
                response.close();
            }
        });
    }

    /**
     * Called when the user edits the profile.
     *
     * @param token            user's token
     * @param oldPassword      user's current password
     * @param newPassword      user's new password
     * @param IeeeMembershipID user's IEEE membership id
     * @param phoneNumber      user's phone number
     * @param HTTP_RESPONSE    HTTPResponse interface instance
     */
    public void editProfile(String token, String oldPassword, String newPassword,
                            String IeeeMembershipID, String phoneNumber,
                            final HTTPResponse HTTP_RESPONSE) {
        OkHttpClient client = new OkHttpClient();
        HashMap<String, String> body = new HashMap<>();
        body.put("old_password", oldPassword);
        body.put("new_password", newPassword);
        body.put("IEEE_membership_ID", IeeeMembershipID);
        body.put("phone_number", phoneNumber);
        Request request = new Request.Builder()
                .put(RequestBody.create(CONTENT_TYPE, new JSONObject(body).toString()))
                .addHeader("Authorization", token)
                .addHeader("user_agent", "Android")
                .url("http://ieeeguc.org/api/user")
                .build();
        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                // No Internet Connection.
                HTTP_RESPONSE.onFailure(-1, null);
                call.cancel();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                // Parse once; 2xx codes report success, everything else failure.
                // (Previously the identical parse/catch logic was duplicated in
                // both branches.)
                int statusCode = response.code();
                try {
                    JSONObject responseBody = new JSONObject(response.body().string());
                    if (statusCode / 100 == 2) {
                        HTTP_RESPONSE.onSuccess(statusCode, responseBody);
                    } else {
                        HTTP_RESPONSE.onFailure(statusCode, responseBody);
                    }
                } catch (JSONException e) {
                    HTTP_RESPONSE.onFailure(500, null);
                }
                response.close();
            }
        });
    }

    /**
     * Called when the user logs out.
     *
     * @param token         token of the user
     * @param HTTP_RESPONSE HTTPResponse interface instance
     */
    public void logout(String token, final HTTPResponse HTTP_RESPONSE) {
        OkHttpClient ok = new OkHttpClient();
        Request request = new Request.Builder()
                .addHeader("Authorization", token)
                .addHeader("user_agent", "Android")
                .url("http://ieeeguc.org/api/logout")
                .build();
        ok.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                HTTP_RESPONSE.onFailure(-1, null);
                call.cancel();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                int code = response.code();
                String body = response.body().string();
                try {
                    JSONObject j = new JSONObject(body);
                    if (code / 100 == 2) {
                        HTTP_RESPONSE.onSuccess(code, j);
                    } else {
                        HTTP_RESPONSE.onFailure(code, j);
                    }
                } catch (JSONException e) {
                    HTTP_RESPONSE.onFailure(500, null);
                }
                response.close();
            }
        });
    }
}
/* * Copyright (c) 2015 David Schulte * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package de.arcus.framework.utils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import de.arcus.framework.logger.Logger;

/**
 * Help function for files
 */
public class FileTools {
    /**
     * Private constructor
     */
    private FileTools() {}

    /**
     * Creates a directory if it not exists
     * @param dir Directory path
     * @return Returns true if the directory was created or already exists
     */
    public static boolean directoryCreate(String dir) {
        File fileDirectory = new File(dir);

        try {
            if (!fileDirectory.exists()) {
                Logger.getInstance().logVerbose("DirectoryCreate", "Create directory: " + dir);

                // Creates the directory
                if (fileDirectory.mkdirs())
                    return true;
                else
                    Logger.getInstance().logWarning("DirectoryCreate", "MkDir failed");
            } else {
                // Directory exists
                Logger.getInstance().logDebug("DirectoryCreate", "Directory already exists");
                return true;
            }
        } catch (Exception e) {
            // Failed
            Logger.getInstance().logError("DirectoryCreate", "Failed: " + e.getMessage());
        }
        return false;
    }

    /**
     * Checks if the directory exists
     * @param dir Path of the file
     * @return Return whether the directory exists
     */
    public static boolean directoryExists(String dir) {
        File tmp = new File(dir);

        // Checks whether the directory exists and whether it is a directory
        return (tmp.isDirectory() && tmp.exists());
    }

    /**
     * Creates an empty file
     * @param file File path
     * @return Returns true if the file was successfully created
     */
    public static boolean fileCreate(String file) {
        Logger.getInstance().logVerbose("FileCreate", "File: " + file);

        try {
            // Create the file
            return (new File(file)).createNewFile();
        } catch (IOException e) {
            // Failed
            Logger.getInstance().logError("FileCreate", "Could not create file: " + e.getMessage());
            return false;
        }
    }

    /**
     * Moves a file
     * @param src Source path
     * @param dest Destination path
     * @return Return whether the moving was successful
     */
    public static boolean fileMove(String src, String dest) {
        Logger.getInstance().logVerbose("FileMove", "From " + src + " to " + dest);

        File fileSrc = new File(src);
        File fileDest = new File(dest);

        // Move the file
        return fileSrc.renameTo(fileDest);
    }

    /**
     * Copies a stream
     * @param inputStream Source stream
     * @param outputStream Destination stream
     * @return Return whether the stream was copied successful
     */
    public static boolean fileCopy(InputStream inputStream, OutputStream outputStream) {
        // The buffer size
        final int BUFFER_SIZE = 1024;

        // Will be set on true if the file was copied correctly
        boolean success = false;

        try {
            // The copy buffer
            byte[] buffer = new byte[BUFFER_SIZE];
            int length;

            // Copy block by block
            while ((length = inputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, length);
            }

            // Copy was successful
            success = true;
        } catch (IOException ex) {
            // Failed
            Logger.getInstance().logError("FileCopy", "Failed: " + ex.toString());
        }

        return success;
    }

    /**
     * Copies a file
     * @param src Source path
     * @param dest Destination path
     * @return Return whether the file was copied successful
     */
    public static boolean fileCopy(String src, String dest) {
        Logger.getInstance().logVerbose("FileCopy", "From " + src + " to " + dest);

        // Will be set on true if the file was copied correctly
        boolean success = false;

        InputStream inputStream = null;
        OutputStream outputStream = null;

        try {
            // Open the file streams
            inputStream = new FileInputStream(src);
            outputStream = new FileOutputStream(dest);

            success = fileCopy(inputStream, outputStream);
        } catch (IOException ex) {
            // Failed
            Logger.getInstance().logError("FileCopy", "Failed: " + ex.toString());
        }

        try {
            // Close all streams
            if (inputStream != null)
                inputStream.close();
            if (outputStream != null)
                outputStream.close();
        } catch (IOException ex) {
            // Failed
            Logger.getInstance().logError("FileCopy", "Failed: " + ex.toString());
        }

        return success;
    }

    /**
     * Deletes a file
     * @param file Path of the file
     * @return Returns whether the deleting was successful
     */
    public static boolean fileDelete(String file) {
        // Delete the file
        return (new File(file)).delete();
    }

    /**
     * Checks if the file exists
     * @param file Path of the file
     * @return Return whether the file exists
     */
    public static boolean fileExists(String file) {
        File tmp = new File(file);

        // Checks whether the file exists and whether it is a file
        return (tmp.isFile() && tmp.exists());
    }

    /**
     * Checks whether the file or directory is a link
     * @param path Path of the file / directory
     * @return Returns whether the file or directory is a link
     */
    public static boolean pathIsSymbolicLink(String path) {
        File file = new File(path);

        try {
            // BUG FIX: the result was inverted before.  A symbolic link
            // resolves to a canonical path that differs from its absolute
            // path, so the method must return true when the paths differ.
            return (!file.getAbsolutePath().equals(file.getCanonicalPath()));
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Gets the root canonical file of a symbolic link
     * @param path The path
     * @return The root file
     */
    public static File getRootCanonicalFile(String path) {
        return getRootCanonicalFile(new File(path));
    }

    /**
     * Gets the root canonical file of a symbolic link
     * @param file The file
     * @return The root file
     */
    public static File getRootCanonicalFile(File file) {
        try {
            // Gets the canonical file
            File canonicalFile = file.getCanonicalFile();

            // Differences between the canonical and the absolute file;
            // follow link chains until the path no longer changes
            while (!file.getAbsolutePath().equals(canonicalFile.getAbsolutePath())) {
                file = canonicalFile;

                // Go deeper
                canonicalFile = file.getCanonicalFile();
            }
        } catch (IOException e) {
            // Failed
            e.printStackTrace();
        }

        return file;
    }

    /**
     * Gets all storages; eg. all sdcards
     * @return List of all storages
     */
    public static String[] getStorages() {
        List<String> storages = new ArrayList<>();

        // Hard coded mount points
        final String[] mountPointBlacklist = new String[] {
                "/mnt/tmp", "/mnt/factory", "/mnt/obb", "/mnt/asec",
                "/mnt/secure", "/mnt/media_rw", "/mnt/shell", "/storage/emulated" };
        final String[] mountPointDirectories = new String[] { "/mnt", "/storage" };
        final String[] mountPoints = new String[] { "/sdcard", "/external_sd" };

        // Adds all mount point directories
        for (String mountPointDirectory : mountPointDirectories) {
            // Checks all subdirectories
            File dir = getRootCanonicalFile(mountPointDirectory);
            if (dir.exists() && dir.isDirectory()) {
                File[] files = dir.listFiles();
                if (files != null) {
                    for (File subDir : files) {
                        subDir = getRootCanonicalFile(subDir);

                        // Is directory
                        if (subDir.isDirectory() && subDir.canRead()) {
                            // Add mount point to list
                            if (!storages.contains(subDir.getAbsolutePath()))
                                storages.add(subDir.getAbsolutePath());
                        }
                    }
                }
            }
        }

        // Adds all direct mount points
        for (String mountPoint : mountPoints) {
            File file = getRootCanonicalFile(mountPoint);
            if (file.isDirectory() && file.canRead()) {
                if (!storages.contains(file.getAbsolutePath()))
                    storages.add(file.getAbsolutePath());
            }
        }

        // Remove all blacklisted paths
        for (String blacklistPath : mountPointBlacklist) {
            storages.remove(blacklistPath);
        }

        // Sort the list
        Collections.sort(storages);

        // Returns the array
        return storages.toArray(new String[storages.size()]);
    }
}
/* * Copyright 2015 Alexey Andreev. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teavm.classlib.java.lang.reflect; import java.lang.annotation.Annotation; import java.lang.annotation.Retention; import java.util.ArrayList; import java.util.List; import org.teavm.dependency.AbstractDependencyListener; import org.teavm.dependency.DependencyAgent; import org.teavm.dependency.MethodDependency; import org.teavm.model.AccessLevel; import org.teavm.model.AnnotationReader; import org.teavm.model.AnnotationValue; import org.teavm.model.CallLocation; import org.teavm.model.ClassHolder; import org.teavm.model.ClassReader; import org.teavm.model.ClassReaderSource; import org.teavm.model.ElementModifier; import org.teavm.model.FieldHolder; import org.teavm.model.FieldReference; import org.teavm.model.MethodHolder; import org.teavm.model.MethodReader; import org.teavm.model.MethodReference; import org.teavm.model.ValueType; import org.teavm.model.emit.ProgramEmitter; import org.teavm.model.emit.ValueEmitter; import org.teavm.model.instructions.ArrayElementType; import org.teavm.platform.PlatformAnnotationProvider; /** * * @author Alexey Andreev */ public class AnnotationDependencyListener extends AbstractDependencyListener { @Override public void classReached(DependencyAgent agent, String className, CallLocation location) { ClassReader cls = agent.getClassSource().get(className); if (cls == null) { return; } for (AnnotationReader annotation : cls.getAnnotations().all()) { 
agent.linkClass(annotation.getType(), location); } createAnnotationClass(agent, className); } private String getAnnotationImplementor(DependencyAgent agent, String annotationType) { String implementorName = annotationType + "$$_impl"; if (agent.getClassSource().get(implementorName) == null) { ClassHolder implementor = createImplementor(agent.getClassSource(), annotationType, implementorName); agent.submitClass(implementor); } return implementorName; } private ClassHolder createImplementor(ClassReaderSource classSource, String annotationType, String implementorName) { ClassHolder implementor = new ClassHolder(implementorName); implementor.setParent("java.lang.Object"); implementor.getInterfaces().add(annotationType); implementor.getModifiers().add(ElementModifier.FINAL); implementor.setLevel(AccessLevel.PUBLIC); ClassReader annotation = classSource.get(annotationType); if (annotation == null) { return implementor; } List<ValueType> ctorSignature = new ArrayList<>(); for (MethodReader methodDecl : annotation.getMethods()) { if (methodDecl.hasModifier(ElementModifier.STATIC)) { continue; } FieldHolder field = new FieldHolder("$" + methodDecl.getName()); field.setType(methodDecl.getResultType()); field.setLevel(AccessLevel.PRIVATE); implementor.addField(field); MethodHolder accessor = new MethodHolder(methodDecl.getDescriptor()); ProgramEmitter pe = ProgramEmitter.create(accessor); ValueEmitter thisVal = pe.newVar(); ValueEmitter result = thisVal.getField(field.getReference(), field.getType()); if (field.getType() instanceof ValueType.Array) { result = result.cloneArray(); } result.returnValue(); implementor.addMethod(accessor); ctorSignature.add(field.getType()); } ctorSignature.add(ValueType.VOID); MethodHolder ctor = new MethodHolder("<init>", ctorSignature.toArray(new ValueType[ctorSignature.size()])); ProgramEmitter pe = ProgramEmitter.create(ctor); ValueEmitter thisVal = pe.newVar(); thisVal.invokeSpecial(new MethodReference(Object.class, "<init>", void.class)); 
for (MethodReader methodDecl : annotation.getMethods()) { if (methodDecl.hasModifier(ElementModifier.STATIC)) { continue; } ValueEmitter param = pe.newVar(); FieldReference field = new FieldReference(implementorName, "$" + methodDecl.getName()); thisVal.setField(field, methodDecl.getResultType(), param); } pe.exit(); implementor.addMethod(ctor); MethodHolder annotTypeMethod = new MethodHolder("annotationType", ValueType.parse(Class.class)); pe = ProgramEmitter.create(annotTypeMethod); pe.newVar(); pe.constant(ValueType.object(annotationType)).returnValue(); implementor.addMethod(annotTypeMethod); return implementor; } @Override public void methodReached(DependencyAgent agent, MethodDependency method, CallLocation location) { ValueType type = method.getMethod().getResultType(); while (type instanceof ValueType.Array) { type = ((ValueType.Array)type).getItemType(); } if (type instanceof ValueType.Object) { String className = ((ValueType.Object)type).getClassName(); ClassReader cls = agent.getClassSource().get(className); if (cls != null && cls.hasModifier(ElementModifier.ANNOTATION)) { agent.linkClass(className, location); } } if (method.getMethod().hasModifier(ElementModifier.STATIC) && method.getMethod().getName().equals("$$__readAnnotations__$$")) { ClassReader cls = agent.getClassSource().get(method.getReference().getClassName()); if (cls != null) { for (AnnotationReader annotation : cls.getAnnotations().all()) { agent.linkClass(annotation.getType(), location); } } } } private void createAnnotationClass(DependencyAgent agent, String className) { String readerClassName = className + "$$__annotations__$$"; if (agent.getClassSource().get(readerClassName) != null) { return; } ClassHolder cls = new ClassHolder(className + "$$__annotations__$$"); cls.setLevel(AccessLevel.PUBLIC); cls.setOwnerName("java.lang.Object"); cls.getInterfaces().add(PlatformAnnotationProvider.class.getName()); MethodHolder ctor = new MethodHolder("<init>", ValueType.VOID); 
ctor.setLevel(AccessLevel.PUBLIC); ProgramEmitter pe = ProgramEmitter.create(ctor); ValueEmitter thisVar = pe.newVar(); thisVar.invokeSpecial(new MethodReference(Object.class, "<init>", void.class)); pe.exit(); ClassReader annotatedClass = agent.getClassSource().get(className); cls.addMethod(ctor); cls.addMethod(addReader(agent, annotatedClass)); agent.submitClass(cls); } private MethodHolder addReader(DependencyAgent agent, ClassReader cls) { MethodHolder readerMethod = new MethodHolder("getAnnotations", ValueType.parse(Annotation[].class)); readerMethod.setLevel(AccessLevel.PUBLIC); ProgramEmitter pe = ProgramEmitter.create(readerMethod); List<AnnotationReader> annotations = new ArrayList<>(); for (AnnotationReader annot : cls.getAnnotations().all()) { ClassReader annotType = agent.getClassSource().get(annot.getType()); if (annotType == null) { continue; } AnnotationReader retention = annotType.getAnnotations().get(Retention.class.getName()); if (retention != null) { String retentionPolicy = retention.getValue("value").getEnumValue().getFieldName(); if (retentionPolicy.equals("RUNTIME")) { annotations.add(annot); } } } ValueEmitter array = pe.constructArray(Annotation.class, annotations.size()); for (int i = 0; i < annotations.size(); ++i) { array.unwrapArray(ArrayElementType.OBJECT).setElement(i, generateAnnotationInstance(agent, pe, annotations.get(i))); } array.returnValue(); return readerMethod; } private ValueEmitter generateAnnotationInstance(DependencyAgent agent, ProgramEmitter pe, AnnotationReader annotation) { ClassReader annotationClass = agent.getClassSource().get(annotation.getType()); if (annotationClass == null) { return pe.constantNull(); } String className = getAnnotationImplementor(agent, annotation.getType()); List<ValueType> ctorSignature = new ArrayList<>(); List<ValueEmitter> params = new ArrayList<>(); for (MethodReader methodDecl : annotationClass.getMethods()) { ctorSignature.add(methodDecl.getResultType()); AnnotationValue value = 
annotation.getValue(methodDecl.getName()); if (value == null) { value = methodDecl.getAnnotationDefault(); } params.add(generateAnnotationValue(agent, pe, methodDecl.getResultType(), value)); } ctorSignature.add(ValueType.VOID); MethodReference ctor = new MethodReference(className, "<init>", ctorSignature.toArray( new ValueType[ctorSignature.size()])); return pe.construct(ctor, params.toArray(new ValueEmitter[params.size()])); } private ValueEmitter generateAnnotationValue(DependencyAgent agent, ProgramEmitter pe, ValueType type, AnnotationValue value) { switch (value.getType()) { case AnnotationValue.BOOLEAN: return pe.constant(value.getBoolean() ? 1 : 0); case AnnotationValue.BYTE: return pe.constant(value.getByte()); case AnnotationValue.SHORT: return pe.constant(value.getShort()); case AnnotationValue.INT: return pe.constant(value.getInt()); case AnnotationValue.LONG: return pe.constant(value.getLong()); case AnnotationValue.FLOAT: return pe.constant(value.getFloat()); case AnnotationValue.DOUBLE: return pe.constant(value.getDouble()); case AnnotationValue.STRING: return pe.constant(value.getString()); case AnnotationValue.LIST: { List<AnnotationValue> list = value.getList(); ValueType itemType = ((ValueType.Array)type).getItemType(); ValueEmitter array = pe.constructArray(itemType, list.size()); for (int i = 0; i < list.size(); ++i) { array.unwrapArray(ArrayElementType.OBJECT).setElement(i, generateAnnotationValue(agent, pe, itemType, list.get(i))); } return array; } case AnnotationValue.ENUM: pe.initClass(value.getEnumValue().getClassName()); return pe.getField(value.getEnumValue(), type); case AnnotationValue.CLASS: return pe.constant(value.getJavaClass()); case AnnotationValue.ANNOTATION: return generateAnnotationInstance(agent, pe, value.getAnnotation()); default: throw new IllegalArgumentException("Unknown annotation value type: " + value.getType()); } } }
/* * Copyright (c) 2004-2013 Regents of the University of California. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the University nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * Copyright (c) 2014 Martin Stockhammer */ package prefux.data.util; import java.util.Iterator; import prefux.Constants; import prefux.data.Edge; import prefux.data.Node; import prefux.data.Tuple; import prefux.util.collections.Queue; /** * Provides a distance-limited breadth first traversal over nodes, edges, * or both, using any number of traversal "roots". 
* * @author <a href="http://jheer.org">jeffrey heer</a> */ public class BreadthFirstIterator implements Iterator { protected Queue m_queue = new Queue(); protected int m_depth; protected int m_traversal; protected boolean m_includeNodes; protected boolean m_includeEdges; /** * Create an uninitialized BreadthFirstIterator. Use the * {@link #init(Object, int, int)} method to initialize the iterator. */ public BreadthFirstIterator() { // do nothing, requires init call } /** * Create a new BreadthFirstIterator starting from the given source node. * @param n the source node from which to begin the traversal * @param depth the maximum graph distance to traverse * @param traversal the traversal type, one of * {@link prefux.Constants#NODE_TRAVERSAL}, * {@link prefux.Constants#EDGE_TRAVERSAL}, or * {@link prefux.Constants#NODE_AND_EDGE_TRAVERSAL} */ public BreadthFirstIterator(Node n, int depth, int traversal) { init(new Node[] {n}, depth, traversal); } /** * Create a new BreadthFirstIterator starting from the given source nodes. * @param it an Iterator over the source nodes from which to begin the * traversal * @param depth the maximum graph distance to traverse * @param traversal the traversal type, one of * {@link prefux.Constants#NODE_TRAVERSAL}, * {@link prefux.Constants#EDGE_TRAVERSAL}, or * {@link prefux.Constants#NODE_AND_EDGE_TRAVERSAL} */ public BreadthFirstIterator(Iterator it, int depth, int traversal) { init(it, depth, traversal); } /** * Initialize (or re-initialize) this iterator. 
* @param o Either a source node or iterator over source nodes * @param depth the maximum graph distance to traverse * @param traversal the traversal type, one of * {@link prefux.Constants#NODE_TRAVERSAL}, * {@link prefux.Constants#EDGE_TRAVERSAL}, or * {@link prefux.Constants#NODE_AND_EDGE_TRAVERSAL} */ public void init(Object o, int depth, int traversal) { // initialize the member variables m_queue.clear(); m_depth = depth; if ( traversal < 0 || traversal >= Constants.TRAVERSAL_COUNT ) throw new IllegalArgumentException( "Unrecognized traversal type: "+traversal); m_traversal = traversal; m_includeNodes = (traversal == Constants.NODE_TRAVERSAL || traversal == Constants.NODE_AND_EDGE_TRAVERSAL); m_includeEdges = (traversal == Constants.EDGE_TRAVERSAL || traversal == Constants.NODE_AND_EDGE_TRAVERSAL); // seed the queue // TODO: clean this up? (use generalized iterator?) if ( m_includeNodes ) { if ( o instanceof Node ) { m_queue.add(o, 0); } else { Iterator tuples = (Iterator)o; while ( tuples.hasNext() ) m_queue.add(tuples.next(), 0); } } else { if ( o instanceof Node ) { Node n = (Node)o; m_queue.visit(n, 0); Iterator edges = getEdges(n); while ( edges.hasNext() ) { Edge e = (Edge)edges.next(); Node nn = e.getAdjacentNode(n); m_queue.visit(nn, 1); if ( m_queue.getDepth(e) < 0 ) m_queue.add(e, 1); } } else { Iterator tuples = (Iterator)o; while ( tuples.hasNext() ) { // TODO: graceful error handling when non-node in set? 
Node n = (Node)tuples.next(); m_queue.visit(n, 0); Iterator edges = getEdges(n); while ( edges.hasNext() ) { Edge e = (Edge)edges.next(); Node nn = e.getAdjacentNode(n); m_queue.visit(nn, 1); if ( m_queue.getDepth(e) < 0 ) m_queue.add(e, 1); } } } } } // ------------------------------------------------------------------------ /** * @see java.util.Iterator#remove() */ public void remove() { throw new UnsupportedOperationException(); } /** * @see java.util.Iterator#hasNext() */ public boolean hasNext() { return !m_queue.isEmpty(); } /** * Determines which edges are traversed for a given node. * @param n a node * @return an iterator over edges incident on the node */ protected Iterator getEdges(Node n) { return n.edges(); // TODO: add support for all edges, in links only, out links only } /** * Get the traversal depth at which a particular tuple was encountered. * @param t the tuple to lookup * @return the traversal depth of the tuple, or -1 if the tuple has not * been visited by the traversal. 
 */
public int getDepth(Tuple t) {
    // Depth bookkeeping is delegated to the queue; -1 means unvisited.
    return m_queue.getDepth(t);
}

/**
 * Returns the next node or edge of the traversal, expanding the
 * breadth-first frontier as tuples are consumed from the queue.
 * @see java.util.Iterator#next()
 */
public Object next() {
    Tuple t = (Tuple)m_queue.removeFirst();

    switch ( m_traversal ) {

    case Constants.NODE_TRAVERSAL:
    case Constants.NODE_AND_EDGE_TRAVERSAL:
        // Loop until a node is produced: queued edges are returned
        // immediately, nodes are expanded first and then returned.
        for ( ; true; t = (Tuple)m_queue.removeFirst() ) {
            if ( t instanceof Edge ) {
                return t;
            } else {
                Node n = (Node)t;
                int d = m_queue.getDepth(n);
                if ( d < m_depth ) {
                    // Below the depth limit: enqueue each unvisited
                    // incident edge/node one level deeper.
                    int dd = d+1;
                    Iterator edges = getEdges(n);
                    while ( edges.hasNext() ) {
                        Edge e = (Edge)edges.next();
                        Node v = e.getAdjacentNode(n);

                        if ( m_includeEdges && m_queue.getDepth(e) < 0 )
                            m_queue.add(e, dd);
                        if ( m_queue.getDepth(v) < 0 )
                            m_queue.add(v, dd);
                    }
                }
                else if ( m_includeEdges && d == m_depth ) {
                    // At the depth limit only edges whose far endpoint
                    // has already been visited are still reported.
                    Iterator edges = getEdges(n);
                    while ( edges.hasNext() ) {
                        Edge e = (Edge)edges.next();
                        Node v = e.getAdjacentNode(n);
                        int dv = m_queue.getDepth(v);
                        if ( dv > 0 && m_queue.getDepth(e) < 0 ) {
                            m_queue.add(e, Math.min(d,dv));
                        }
                    }
                }
                return n;
            }
        }

    case Constants.EDGE_TRAVERSAL:
        // Edge-only traversal: expand through the deeper endpoint of
        // the returned edge when the depth limit allows it.
        Edge e = (Edge)t;
        Node u = e.getSourceNode();
        Node v = e.getTargetNode();
        int du = m_queue.getDepth(u);
        int dv = m_queue.getDepth(v);
        if ( du != dv ) {
            // The deeper endpoint is the newly reached node.
            Node n = (dv > du ? v : u);
            int d = Math.max(du, dv);
            if ( d < m_depth ) {
                int dd = d+1;
                Iterator edges = getEdges(n);
                while ( edges.hasNext() ) {
                    Edge ee = (Edge)edges.next();
                    if ( m_queue.getDepth(ee) >= 0 )
                        continue; // already visited

                    Node nn = ee.getAdjacentNode(n);
                    m_queue.visit(nn, dd);
                    m_queue.add(ee, dd);
                }
            }
        }
        return e;

    default:
        // m_traversal was validated in init(); reaching here means the
        // iterator was used without a successful init() call.
        throw new IllegalStateException();
    }
}

} // end of class BreadthFirstIterator
package org.keycloak.adapters.saml; import org.jboss.logging.Logger; import org.keycloak.common.VerificationException; import org.keycloak.adapters.spi.AuthChallenge; import org.keycloak.adapters.spi.AuthOutcome; import org.keycloak.adapters.spi.HttpFacade; import org.keycloak.dom.saml.v2.assertion.AssertionType; import org.keycloak.dom.saml.v2.assertion.AttributeStatementType; import org.keycloak.dom.saml.v2.assertion.AttributeType; import org.keycloak.dom.saml.v2.assertion.AuthnStatementType; import org.keycloak.dom.saml.v2.assertion.NameIDType; import org.keycloak.dom.saml.v2.assertion.StatementAbstractType; import org.keycloak.dom.saml.v2.assertion.SubjectType; import org.keycloak.dom.saml.v2.protocol.LogoutRequestType; import org.keycloak.dom.saml.v2.protocol.RequestAbstractType; import org.keycloak.dom.saml.v2.protocol.ResponseType; import org.keycloak.dom.saml.v2.protocol.StatusResponseType; import org.keycloak.saml.BaseSAML2BindingBuilder; import org.keycloak.saml.SAML2LogoutRequestBuilder; import org.keycloak.saml.SAML2LogoutResponseBuilder; import org.keycloak.saml.SAMLRequestParser; import org.keycloak.saml.SignatureAlgorithm; import org.keycloak.saml.common.constants.GeneralConstants; import org.keycloak.saml.common.exceptions.ProcessingException; import org.keycloak.saml.common.util.Base64; import org.keycloak.saml.processing.api.saml.v2.sig.SAML2Signature; import org.keycloak.saml.processing.core.saml.v2.common.SAMLDocumentHolder; import org.keycloak.saml.processing.core.saml.v2.util.AssertionUtil; import org.keycloak.saml.processing.web.util.PostBindingUtil; import org.keycloak.common.util.KeycloakUriBuilder; import org.keycloak.common.util.MultivaluedHashMap; import org.w3c.dom.Document; import org.w3c.dom.Node; import java.security.PublicKey; import java.security.Signature; import java.util.HashSet; import java.util.List; import java.util.Set; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public 
abstract class SamlAuthenticator { protected static Logger log = Logger.getLogger(SamlAuthenticator.class); protected HttpFacade facade; protected AuthChallenge challenge; protected SamlDeployment deployment; protected SamlSessionStore sessionStore; public SamlAuthenticator(HttpFacade facade, SamlDeployment deployment, SamlSessionStore sessionStore) { this.facade = facade; this.deployment = deployment; this.sessionStore = sessionStore; } public AuthChallenge getChallenge() { return challenge; } public AuthOutcome authenticate() { String samlRequest = facade.getRequest().getFirstParam(GeneralConstants.SAML_REQUEST_KEY); String samlResponse = facade.getRequest().getFirstParam(GeneralConstants.SAML_RESPONSE_KEY); String relayState = facade.getRequest().getFirstParam(GeneralConstants.RELAY_STATE); boolean globalLogout = "true".equals(facade.getRequest().getQueryParamValue("GLO")); if (samlRequest != null) { return handleSamlRequest(samlRequest, relayState); } else if (samlResponse != null) { return handleSamlResponse(samlResponse, relayState); } else if (sessionStore.isLoggedIn()) { if (globalLogout) { return globalLogout(); } if (verifySSL()) return AuthOutcome.FAILED; log.debug("AUTHENTICATED: was cached"); return AuthOutcome.AUTHENTICATED; } return initiateLogin(); } protected AuthOutcome globalLogout() { SamlSession account = sessionStore.getAccount(); if (account == null) { return AuthOutcome.NOT_ATTEMPTED; } SAML2LogoutRequestBuilder logoutBuilder = new SAML2LogoutRequestBuilder() .assertionExpiration(30) .issuer(deployment.getEntityID()) .sessionIndex(account.getSessionIndex()) .userPrincipal(account.getPrincipal().getSamlSubject(), account.getPrincipal().getNameIDFormat()) .destination(deployment.getIDP().getSingleLogoutService().getRequestBindingUrl()); BaseSAML2BindingBuilder binding = new BaseSAML2BindingBuilder(); if (deployment.getIDP().getSingleLogoutService().signRequest()) { binding.signWith(deployment.getSigningKeyPair()) .signDocument(); } 
binding.relayState("logout"); try { SamlUtil.sendSaml(true, facade, deployment.getIDP().getSingleLogoutService().getRequestBindingUrl(), binding, logoutBuilder.buildDocument(), deployment.getIDP().getSingleLogoutService().getRequestBinding()); } catch (Exception e) { log.error("Could not send global logout SAML request", e); return AuthOutcome.FAILED; } return AuthOutcome.NOT_ATTEMPTED; } protected AuthOutcome handleSamlRequest(String samlRequest, String relayState) { SAMLDocumentHolder holder = null; boolean postBinding = false; String requestUri = facade.getRequest().getURI(); if (facade.getRequest().getMethod().equalsIgnoreCase("GET")) { // strip out query params int index = requestUri.indexOf('?'); if (index > -1) { requestUri = requestUri.substring(0, index); } holder = SAMLRequestParser.parseRequestRedirectBinding(samlRequest); } else { postBinding = true; holder = SAMLRequestParser.parseRequestPostBinding(samlRequest); } RequestAbstractType requestAbstractType = (RequestAbstractType) holder.getSamlObject(); if (!requestUri.equals(requestAbstractType.getDestination().toString())) { log.error("expected destination '" + requestUri + "' got '" + requestAbstractType.getDestination() + "'"); return AuthOutcome.FAILED; } if (requestAbstractType instanceof LogoutRequestType) { if (deployment.getIDP().getSingleLogoutService().validateRequestSignature()) { try { validateSamlSignature(holder, postBinding, GeneralConstants.SAML_REQUEST_KEY); } catch (VerificationException e) { log.error("Failed to verify saml request signature", e); return AuthOutcome.FAILED; } } LogoutRequestType logout = (LogoutRequestType) requestAbstractType; return logoutRequest(logout, relayState); } else { log.error("unknown SAML request type"); return AuthOutcome.FAILED; } } protected AuthOutcome logoutRequest(LogoutRequestType request, String relayState) { if (request.getSessionIndex() == null || request.getSessionIndex().isEmpty()) { 
sessionStore.logoutByPrincipal(request.getNameID().getValue()); } else { sessionStore.logoutBySsoId(request.getSessionIndex()); } String issuerURL = deployment.getEntityID(); SAML2LogoutResponseBuilder builder = new SAML2LogoutResponseBuilder(); builder.logoutRequestID(request.getID()); builder.destination(deployment.getIDP().getSingleLogoutService().getResponseBindingUrl()); builder.issuer(issuerURL); BaseSAML2BindingBuilder binding = new BaseSAML2BindingBuilder().relayState(relayState); if (deployment.getIDP().getSingleLogoutService().signResponse()) { binding.signatureAlgorithm(deployment.getSignatureAlgorithm()) .signWith(deployment.getSigningKeyPair()) .signDocument(); if (deployment.getSignatureCanonicalizationMethod() != null) binding.canonicalizationMethod(deployment.getSignatureCanonicalizationMethod()); } try { SamlUtil.sendSaml(false, facade, deployment.getIDP().getSingleLogoutService().getResponseBindingUrl(), binding, builder.buildDocument(), deployment.getIDP().getSingleLogoutService().getResponseBinding()); } catch (Exception e) { log.error("Could not send logout response SAML request", e); return AuthOutcome.FAILED; } return AuthOutcome.NOT_ATTEMPTED; } protected AuthOutcome handleSamlResponse(String samlResponse, String relayState) { SAMLDocumentHolder holder = null; boolean postBinding = false; String requestUri = facade.getRequest().getURI(); if (facade.getRequest().getMethod().equalsIgnoreCase("GET")) { int index = requestUri.indexOf('?'); if (index > -1) { requestUri = requestUri.substring(0, index); } holder = extractRedirectBindingResponse(samlResponse); } else { postBinding = true; holder = extractPostBindingResponse(samlResponse); } StatusResponseType statusResponse = (StatusResponseType)holder.getSamlObject(); // validate destination if (!requestUri.equals(statusResponse.getDestination())) { log.error("Request URI does not match SAML request destination"); return AuthOutcome.FAILED; } if (statusResponse instanceof ResponseType) { if 
(deployment.getIDP().getSingleSignOnService().validateResponseSignature()) { try { validateSamlSignature(holder, postBinding, GeneralConstants.SAML_RESPONSE_KEY); } catch (VerificationException e) { log.error("Failed to verify saml response signature", e); return AuthOutcome.FAILED; } } return handleLoginResponse((ResponseType)statusResponse); } else { if (deployment.getIDP().getSingleLogoutService().validateResponseSignature()) { try { validateSamlSignature(holder, postBinding, GeneralConstants.SAML_RESPONSE_KEY); } catch (VerificationException e) { log.error("Failed to verify saml response signature", e); return AuthOutcome.FAILED; } } // todo need to check that it is actually a LogoutResponse return handleLogoutResponse(holder, statusResponse, relayState); } } private void validateSamlSignature(SAMLDocumentHolder holder, boolean postBinding, String paramKey) throws VerificationException { if (postBinding) { verifyPostBindingSignature(holder.getSamlDocument(), deployment.getIDP().getSignatureValidationKey()); } else { verifyRedirectBindingSignature(deployment.getIDP().getSignatureValidationKey(), paramKey); } } protected AuthOutcome handleLoginResponse(ResponseType responseType) { AssertionType assertion = null; try { assertion = AssertionUtil.getAssertion(responseType, deployment.getDecryptionKey()); if (AssertionUtil.hasExpired(assertion)) { return initiateLogin(); } } catch (Exception e) { log.error("Error extracting SAML assertion, e"); return AuthOutcome.FAILED; } SubjectType subject = assertion.getSubject(); SubjectType.STSubType subType = subject.getSubType(); NameIDType subjectNameID = (NameIDType) subType.getBaseID(); String principalName = subjectNameID.getValue(); final Set<String> roles = new HashSet<>(); MultivaluedHashMap<String, String> attributes = new MultivaluedHashMap<>(); MultivaluedHashMap<String, String> friendlyAttributes = new MultivaluedHashMap<>(); Set<StatementAbstractType> statements = assertion.getStatements(); for 
(StatementAbstractType statement : statements) { if (statement instanceof AttributeStatementType) { AttributeStatementType attributeStatement = (AttributeStatementType) statement; List<AttributeStatementType.ASTChoiceType> attList = attributeStatement.getAttributes(); for (AttributeStatementType.ASTChoiceType obj : attList) { AttributeType attr = obj.getAttribute(); if (isRole(attr)) { List<Object> attributeValues = attr.getAttributeValue(); if (attributeValues != null) { for (Object attrValue : attributeValues) { String role = getAttributeValue(attrValue); log.debugv("Add role: {0}", role); roles.add(role); } } } else { List<Object> attributeValues = attr.getAttributeValue(); if (attributeValues != null) { for (Object attrValue : attributeValues) { String value = getAttributeValue(attrValue); if (attr.getName() != null) { attributes.add(attr.getName(), value); } if (attr.getFriendlyName() != null) { friendlyAttributes.add(attr.getFriendlyName(), value); } } } } } } } if (deployment.getPrincipalNamePolicy() == SamlDeployment.PrincipalNamePolicy.FROM_ATTRIBUTE) { if (deployment.getPrincipalAttributeName() != null) { String attribute = attributes.getFirst(deployment.getPrincipalAttributeName()); if (attribute != null) principalName = attribute; else { attribute = friendlyAttributes.getFirst(deployment.getPrincipalAttributeName()); if (attribute != null) principalName = attribute; } } } AuthnStatementType authn = null; for (Object statement : assertion.getStatements()) { if (statement instanceof AuthnStatementType) { authn = (AuthnStatementType)statement; break; } } final SamlPrincipal principal = new SamlPrincipal(principalName, principalName, subjectNameID.getFormat().toString(), attributes, friendlyAttributes); String index = authn == null ? 
null : authn.getSessionIndex(); final String sessionIndex = index; SamlSession account = new SamlSession(principal, roles, sessionIndex); sessionStore.saveAccount(account); completeAuthentication(account); // redirect to original request, it will be restored String redirectUri = sessionStore.getRedirectUri(); if (redirectUri != null) { facade.getResponse().setHeader("Location", redirectUri); facade.getResponse().setStatus(302); facade.getResponse().end(); } else { log.debug("IDP initiated invocation"); } log.debug("AUTHENTICATED authn"); return AuthOutcome.AUTHENTICATED; } protected abstract void completeAuthentication(SamlSession account); private String getAttributeValue(Object attrValue) { String value = null; if (attrValue instanceof String) { value = (String)attrValue; } else if (attrValue instanceof Node) { Node roleNode = (Node) attrValue; value = roleNode.getFirstChild().getNodeValue(); } else if (attrValue instanceof NameIDType) { NameIDType nameIdType = (NameIDType) attrValue; value = nameIdType.getValue(); } else { log.warn("Unable to extract unknown SAML assertion attribute value type: " + attrValue.getClass().getName()); } return value; } protected boolean isRole(AttributeType attribute) { return (attribute.getName() != null && deployment.getRoleAttributeNames().contains(attribute.getName())) || (attribute.getFriendlyName() != null && deployment.getRoleAttributeNames().contains(attribute.getFriendlyName())); } protected AuthOutcome handleLogoutResponse(SAMLDocumentHolder holder, StatusResponseType responseType, String relayState) { boolean loggedIn = sessionStore.isLoggedIn(); if (!loggedIn || !"logout".equals(relayState)) { return AuthOutcome.NOT_ATTEMPTED; } sessionStore.logoutAccount(); return AuthOutcome.LOGGED_OUT; } protected SAMLDocumentHolder extractRedirectBindingResponse(String response) { return SAMLRequestParser.parseRequestRedirectBinding(response); } protected SAMLDocumentHolder extractPostBindingResponse(String response) { byte[] 
samlBytes = PostBindingUtil.base64Decode(response); String xml = new String(samlBytes); return SAMLRequestParser.parseResponseDocument(samlBytes); } protected AuthOutcome initiateLogin() { challenge = new InitiateLogin(deployment, sessionStore); return AuthOutcome.NOT_ATTEMPTED; } protected boolean verifySSL() { if (!facade.getRequest().isSecure() && deployment.getSslRequired().isRequired(facade.getRequest().getRemoteAddr())) { log.warn("SSL is required to authenticate"); return true; } return false; } public void verifyPostBindingSignature(Document document, PublicKey publicKey) throws VerificationException { SAML2Signature saml2Signature = new SAML2Signature(); try { if (!saml2Signature.validate(document, publicKey)) { throw new VerificationException("Invalid signature on document"); } } catch (ProcessingException e) { throw new VerificationException("Error validating signature", e); } } public void verifyRedirectBindingSignature(PublicKey publicKey, String paramKey) throws VerificationException { String request = facade.getRequest().getQueryParamValue(paramKey); String algorithm = facade.getRequest().getQueryParamValue(GeneralConstants.SAML_SIG_ALG_REQUEST_KEY); String signature = facade.getRequest().getQueryParamValue(GeneralConstants.SAML_SIGNATURE_REQUEST_KEY); String decodedAlgorithm = facade.getRequest().getQueryParamValue(GeneralConstants.SAML_SIG_ALG_REQUEST_KEY); if (request == null) { throw new VerificationException("SAML Request was null"); } if (algorithm == null) throw new VerificationException("SigAlg was null"); if (signature == null) throw new VerificationException("Signature was null"); // Shibboleth doesn't sign the document for redirect binding. // todo maybe a flag? 
String relayState = facade.getRequest().getQueryParamValue(GeneralConstants.RELAY_STATE); KeycloakUriBuilder builder = KeycloakUriBuilder.fromPath("/") .queryParam(paramKey, request); if (relayState != null) { builder.queryParam(GeneralConstants.RELAY_STATE, relayState); } builder.queryParam(GeneralConstants.SAML_SIG_ALG_REQUEST_KEY, algorithm); String rawQuery = builder.build().getRawQuery(); try { //byte[] decodedSignature = RedirectBindingUtil.urlBase64Decode(signature); byte[] decodedSignature = Base64.decode(signature); SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.getFromXmlMethod(decodedAlgorithm); Signature validator = signatureAlgorithm.createSignature(); // todo plugin signature alg validator.initVerify(publicKey); validator.update(rawQuery.getBytes("UTF-8")); if (!validator.verify(decodedSignature)) { throw new VerificationException("Invalid query param signature"); } } catch (Exception e) { throw new VerificationException(e); } } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.widgets.client.datamodel;

import java.net.URL;
import java.util.HashMap;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;

import org.drools.workbench.models.datamodel.oracle.PackageDataModelOracle;
import org.drools.workbench.models.datamodel.oracle.ProjectDataModelOracle;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.weld.environment.se.StartMain;
import org.junit.Before;
import org.junit.Test;
import org.kie.workbench.common.services.datamodel.backend.server.service.DataModelService;
import org.kie.workbench.common.services.datamodel.model.PackageDataModelOracleBaselinePayload;
import org.kie.workbench.common.services.datamodel.service.IncrementalDataModelService;
import org.uberfire.backend.server.util.Paths;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.callbacks.Callback;
import org.uberfire.java.nio.fs.file.SimpleFileSystemProvider;

import static org.junit.Assert.*;
import static org.kie.workbench.common.widgets.client.datamodel.PackageDataModelOracleTestUtils.*;
import static org.mockito.Mockito.*;

/**
 * Tests for DataModelService: verifies that DRL {@code @role(event)} declarations on
 * Java types are surfaced through both the package-level and project-level oracles.
 * The four original copy-paste test bodies are collapsed into one parameterized helper.
 */
public class PackageDataModelExtendJavaTypeTest {

    private final SimpleFileSystemProvider fs = new SimpleFileSystemProvider();
    private BeanManager beanManager;
    private Paths paths;

    @Before
    public void setUp() throws Exception {
        //Bootstrap WELD container
        StartMain startMain = new StartMain( new String[ 0 ] );
        beanManager = startMain.go().getBeanManager();

        //Instantiate Paths used in tests for Path conversion
        final Bean pathsBean = (Bean) beanManager.getBeans( Paths.class ).iterator().next();
        final CreationalContext cc = beanManager.createCreationalContext( pathsBean );
        paths = (Paths) beanManager.getReference( pathsBean, Paths.class, cc );

        //Ensure URLs use the default:// scheme
        fs.forceAsDefault();
    }

    @Test
    public void testPackageExtendJavaTypeWithQualifiedDRLBeanName() throws Exception {
        runExtendJavaTypeTest( "/DataModelBackendExtendJavaTypeTest1/src/main/java/t4p1",
                               "t4p1",
                               "t4p1.Bean1",
                               false );
    }

    @Test
    public void testProjectExtendJavaTypeWithQualifiedDRLBeanName() throws Exception {
        runExtendJavaTypeTest( "/DataModelBackendExtendJavaTypeTest1/src/main/java/t4p1",
                               "t4p1",
                               "t4p1.Bean1",
                               true );
    }

    @Test
    public void testPackageExtendJavaTypeWithImport() throws Exception {
        runExtendJavaTypeTest( "/DataModelBackendExtendJavaTypeTest2/src/main/java/t5p1",
                               "t5p1",
                               "t5p1.Bean1",
                               false );
    }

    @Test
    public void testProjectExtendJavaTypeWithImport() throws Exception {
        runExtendJavaTypeTest( "/DataModelBackendExtendJavaTypeTest2/src/main/java/t5p1",
                               "t5p1",
                               "t5p1.Bean1",
                               true );
    }

    /**
     * Shared body of the four tests above.
     *
     * @param resourceUri     classpath resource of the package under test
     * @param packageName     package name to set on the baseline payload
     * @param eventTypeFQN    fully-qualified name of the bean expected to be an event
     * @param useProjectModel when {@code true} model fields come from the project-level
     *                        oracle; otherwise from the package-level oracle
     */
    private void runExtendJavaTypeTest( final String resourceUri,
                                        final String packageName,
                                        final String eventTypeFQN,
                                        final boolean useProjectModel ) throws Exception {
        final Bean dataModelServiceBean = (Bean) beanManager.getBeans( DataModelService.class ).iterator().next();
        final CreationalContext cc = beanManager.createCreationalContext( dataModelServiceBean );
        final DataModelService dataModelService = (DataModelService) beanManager.getReference( dataModelServiceBean,
                                                                                               DataModelService.class,
                                                                                               cc );

        final URL packageUrl = this.getClass().getResource( resourceUri );
        final org.uberfire.java.nio.file.Path nioPackagePath = fs.getPath( packageUrl.toURI() );
        final Path packagePath = paths.convert( nioPackagePath );

        // The service caller is always backed by the package-level oracle; model fields may
        // come from either the package- or project-level oracle depending on the test.
        final PackageDataModelOracle packageLoader = dataModelService.getDataModel( packagePath );
        final ProjectDataModelOracle fieldsModel = useProjectModel ?
                dataModelService.getProjectDataModel( packagePath ) :
                packageLoader;

        //Emulate server-to-client conversions
        final MockAsyncPackageDataModelOracleImpl oracle = new MockAsyncPackageDataModelOracleImpl();
        final Caller<IncrementalDataModelService> service = new MockIncrementalDataModelServiceCaller( packageLoader );
        oracle.setService( service );

        final PackageDataModelOracleBaselinePayload dataModel = new PackageDataModelOracleBaselinePayload();
        dataModel.setPackageName( packageName );
        dataModel.setModelFields( fieldsModel.getProjectModelFields() );
        final HashMap<String, Boolean> eventTypes = new HashMap<String, Boolean>();
        eventTypes.put( eventTypeFQN, true );
        dataModel.setEventTypes( eventTypes );
        PackageDataModelOracleTestUtils.populateDataModelOracle( mock( Path.class ),
                                                                 new MockHasImports(),
                                                                 oracle,
                                                                 dataModel );

        final String simpleName = eventTypeFQN.substring( eventTypeFQN.lastIndexOf( '.' ) + 1 );
        assertNotNull( oracle );
        assertEquals( 1, oracle.getFactTypes().length );
        assertContains( simpleName, oracle.getFactTypes() );

        oracle.isFactTypeAnEvent( simpleName, new Callback<Boolean>() {
            @Override
            public void callback( final Boolean result ) {
                assertTrue( result );
            }
        } );
    }

}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.util.bin.format.dwarf4;

import static ghidra.app.util.bin.format.dwarf4.encoding.DWARFAttribute.*;
import static ghidra.app.util.bin.format.dwarf4.encoding.DWARFEncoding.*;
import static ghidra.app.util.bin.format.dwarf4.encoding.DWARFTag.*;
import static org.junit.Assert.*;

import java.io.IOException;

import org.junit.After;
import org.junit.Before;

import ghidra.app.plugin.core.analysis.AutoAnalysisManager;
import ghidra.app.services.DataTypeManagerService;
import ghidra.app.util.bin.format.dwarf4.encoding.DWARFSourceLanguage;
import ghidra.app.util.bin.format.dwarf4.encoding.DWARFTag;
import ghidra.app.util.bin.format.dwarf4.next.*;
import ghidra.app.util.bin.format.dwarf4.next.sectionprovider.NullSectionProvider;
import ghidra.program.database.ProgramBuilder;
import ghidra.program.database.ProgramDB;
import ghidra.program.database.data.DataTypeManagerDB;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSpace;
import ghidra.program.model.data.*;
import ghidra.test.AbstractGhidraHeadedIntegrationTest;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;

/**
 * Base class for unit tests needing DWARF DIEs.  Provides 2 DWARF compile-units and helper
 * methods to create DIE records.
 */
public class DWARFTestBase extends AbstractGhidraHeadedIntegrationTest {

	protected static final long BaseAddress = 0x400;

	protected ProgramDB program;
	protected AddressSpace space;
	protected DataTypeManagerDB dataMgr;
	protected DataTypeManager builtInDTM;
	protected int transactionID;
	protected TaskMonitor monitor = TaskMonitor.DUMMY;
	protected DWARFImportOptions importOptions;
	protected DWARFProgram dwarfProg;
	protected MockDWARFCompilationUnit cu;
	protected MockDWARFCompilationUnit cu2;
	protected DWARFDataTypeManager dwarfDTM;
	protected CategoryPath rootCP;

	/*
	 * Builds an x64 program with one initialized memory block and two mock DWARF
	 * compilation units, and opens a transaction that tearDown() commits.
	 * @see TestCase#setUp()
	 */
	@Before
	public void setUp() throws Exception {
		program = createDefaultProgram(testName.getMethodName(), ProgramBuilder._X64, this);
		space = program.getAddressFactory().getDefaultAddressSpace();
		dataMgr = program.getDataTypeManager();
		startTransaction();
		program.getMemory()
				.createInitializedBlock("test", addr(BaseAddress), 500, (byte) 0,
					TaskMonitor.DUMMY, false);
		AutoAnalysisManager mgr = AutoAnalysisManager.getAnalysisManager(program);
		DataTypeManagerService dtms = mgr.getDataTypeManagerService();
		builtInDTM = dtms.getBuiltInDataTypesManager();

		importOptions = new DWARFImportOptions();
		dwarfProg =
			new DWARFProgram(program, importOptions, TaskMonitor.DUMMY, new NullSectionProvider());
		rootCP = dwarfProg.getUncategorizedRootDNI().asCategoryPath();

		cu = new MockDWARFCompilationUnit(dwarfProg, 0x1000, 0x2000, 0,
			DWARFCompilationUnit.DWARF_32, (short) 4, 0, (byte) 8, 0,
			DWARFSourceLanguage.DW_LANG_C);
		cu2 = new MockDWARFCompilationUnit(dwarfProg, 0x3000, 0x4000, 0,
			DWARFCompilationUnit.DWARF_32, (short) 4, 0, (byte) 8, 1,
			DWARFSourceLanguage.DW_LANG_C);
		setMockCompilationUnits(cu, cu2);

		DWARFImportSummary importSummary = new DWARFImportSummary();
		dwarfDTM = new DWARFDataTypeManager(dwarfProg, dataMgr, builtInDTM, importSummary);
	}

	/*
	 * @see TestCase#tearDown()
	 */
	@After
	public void tearDown() throws Exception {
		endTransaction();
		dwarfProg.close();
		program.release(this);
	}

	/** Replaces the program's compilation units with the given mocks. */
	protected void setMockCompilationUnits(DWARFCompilationUnit... compilationUnits) {
		dwarfProg.getCompilationUnits().clear();
		for (DWARFCompilationUnit compilationUnit : compilationUnits) {
			dwarfProg.getCompilationUnits().add(compilationUnit);
		}
	}

	protected void startTransaction() {
		transactionID = program.startTransaction("Test");
	}

	protected void endTransaction() {
		program.endTransaction(transactionID, true);
	}

	protected void checkPreconditions() throws CancelledException, DWARFException, IOException {
		dwarfProg.checkPreconditions(monitor);
	}

	/** Runs precondition checks and imports all data types from the mock CUs. */
	protected void importAllDataTypes() throws CancelledException, IOException, DWARFException {
		dwarfProg.checkPreconditions(monitor);
		dwarfDTM.importAllDataTypes(monitor);
	}

	/** Imports all data types and then all functions from the mock CUs. */
	protected void importFunctions() throws CancelledException, IOException, DWARFException {
		dwarfProg.checkPreconditions(monitor);
		dwarfDTM.importAllDataTypes(monitor);

		DWARFImportSummary importSummary = new DWARFImportSummary();
		DWARFFunctionImporter dfi =
			new DWARFFunctionImporter(dwarfProg, dwarfDTM, importOptions, importSummary, monitor);
		dfi.importFunctions();
	}

	/**
	 * Returns the {@link DIEAggregate} for a DIE, after making the DIE's compilation unit
	 * current (the aggregate lookup requires it).
	 */
	protected DIEAggregate getAggregate(DebugInfoEntry die)
			throws CancelledException, IOException, DWARFException {
		dwarfProg.setCurrentCompilationUnit(die.getCompilationUnit(), monitor);
		return dwarfProg.getAggregate(die);
	}

	/**
	 * Creates a DW_TAG_base_type DIE.
	 *
	 * @param name optional type name (omitted when null)
	 * @param size byte size of the type
	 * @param encoding a DW_ATE_* encoding value
	 * @param dcu owning compilation unit
	 */
	protected DebugInfoEntry addBaseType(String name, int size, int encoding,
			MockDWARFCompilationUnit dcu) {
		DIECreator tmp = new DIECreator(DW_TAG_base_type)
				.addInt(DW_AT_byte_size, size)
				.addInt(DW_AT_encoding, encoding);
		if (name != null) {
			tmp.addString(DW_AT_name, name);
		}
		return tmp.create(dcu);
	}

	protected DebugInfoEntry addInt(MockDWARFCompilationUnit dcu) {
		return addBaseType("int", 4, DW_ATE_signed, dcu);
	}

	protected DebugInfoEntry addFloat(MockDWARFCompilationUnit dcu) {
		return addBaseType("float", 4, DW_ATE_float, dcu);
	}

	protected DebugInfoEntry addDouble(MockDWARFCompilationUnit dcu) {
		return addBaseType("double", 8, DW_ATE_float, dcu);
	}

	/** Creates a DW_TAG_typedef DIE pointing at {@code die}; both must share the same CU. */
	protected DebugInfoEntry addTypedef(String name, DebugInfoEntry die,
			MockDWARFCompilationUnit dcu) {
		assertTrue(die.getCompilationUnit() == dcu);
		return new DIECreator(DW_TAG_typedef)
				.addString(DW_AT_name, name)
				.addRef(DW_AT_type, die)
				.create(dcu);
	}

	/** Creates a DW_TAG_subprogram DIE; name and return type are both optional. */
	protected DebugInfoEntry addSubprogram(String name, DebugInfoEntry returnTypeDIE,
			MockDWARFCompilationUnit dcu) {
		assertTrue(returnTypeDIE == null || returnTypeDIE.getCompilationUnit() == dcu);
		DIECreator tmp = new DIECreator(DW_TAG_subprogram);
		if (name != null) {
			tmp.addString(DW_AT_name, name);
		}
		if (returnTypeDIE != null) {
			tmp.addRef(DW_AT_type, returnTypeDIE);
		}
		return tmp.create(dcu);
	}

	/** Creates a DW_TAG_subroutine_type DIE; name and return type are both optional. */
	protected DebugInfoEntry addSubroutineType(String name, DebugInfoEntry returnTypeDIE,
			MockDWARFCompilationUnit dcu) {
		assertTrue(returnTypeDIE == null || returnTypeDIE.getCompilationUnit() == dcu);
		DIECreator tmp = new DIECreator(DW_TAG_subroutine_type);
		if (name != null) {
			tmp.addString(DW_AT_name, name);
		}
		if (returnTypeDIE != null) {
			tmp.addRef(DW_AT_type, returnTypeDIE);
		}
		return tmp.create(dcu);
	}

	/** Adds a DW_TAG_formal_parameter child to {@code parent}. */
	protected DebugInfoEntry addParam(DebugInfoEntry parent, String name, DebugInfoEntry typeDIE,
			MockDWARFCompilationUnit dcu) {
		assertTrue(typeDIE == null || typeDIE.getCompilationUnit() == dcu);
		assertTrue(parent.getCompilationUnit() == dcu);
		return new DIECreator(DW_TAG_formal_parameter)
				.addRef(DW_AT_type, typeDIE)
				.setParent(parent)
				.create(dcu);
	}

	/** Returns a struct DIECreator that is the "specification" completion of a decl DIE. */
	protected DIECreator newSpecStruct(DebugInfoEntry declDIE, int size) {
		DIECreator struct = new DIECreator(DW_TAG_structure_type)
				.addRef(DW_AT_specification, declDIE)
				.addInt(DW_AT_byte_size, size);
		return struct;
	}

	/** Returns a forward-declaration-only struct DIECreator. */
	protected DIECreator newDeclStruct(String name) {
		DIECreator struct = new DIECreator(DW_TAG_structure_type)
				.addBoolean(DW_AT_declaration, true)
				.addString(DW_AT_name, name);
		return struct;
	}

	/** Returns a struct DIECreator with an optional name and the given byte size. */
	protected DIECreator newStruct(String name, int size) {
		DIECreator struct = new DIECreator(DW_TAG_structure_type);
		if (name != null) {
			struct.addString(DW_AT_name, name);
		}
		struct.addInt(DW_AT_byte_size, size);
		return struct;
	}

	/** Creates a DW_TAG_enumeration_type DIE with an optional name. */
	protected DebugInfoEntry createEnum(String name, int size, MockDWARFCompilationUnit dcu) {
		DIECreator resultEnum = new DIECreator(DW_TAG_enumeration_type);
		if (name != null) {
			resultEnum.addString(DW_AT_name, name);
		}
		resultEnum.addInt(DW_AT_byte_size, size);
		return resultEnum.create(dcu);
	}

	/** Adds a DW_TAG_enumerator value child to {@code parentEnum}. */
	protected DebugInfoEntry addEnumValue(DebugInfoEntry parentEnum, String valueName,
			long valueValue, MockDWARFCompilationUnit dcu) {
		assertTrue(parentEnum.getCompilationUnit() == dcu);
		DIECreator enumValue = new DIECreator(DW_TAG_enumerator)
				.addString(DW_AT_name, valueName)
				.addInt(DW_AT_const_value, valueValue)
				.setParent(parentEnum);
		return enumValue.create(dcu);
	}

	/** Creates a pointer-type DIE targeting {@code targetDIE}. */
	protected DebugInfoEntry addPtr(DebugInfoEntry targetDIE, MockDWARFCompilationUnit dcu) {
		assertTrue(targetDIE.getCompilationUnit() == dcu);
		return new DIECreator(DW_TAG_pointer_type).addRef(DW_AT_type, targetDIE).create(dcu);
	}

	/**
	 * Creates a pointer-type DIE whose target is a record that does not exist yet
	 * (a forward reference {@code fwdRecordOffset} entries ahead).
	 */
	protected DebugInfoEntry addFwdPtr(MockDWARFCompilationUnit dcu, int fwdRecordOffset) {
		return new DIECreator(DW_TAG_pointer_type)
				.addRef(DW_AT_type, getForwardOffset(dcu, fwdRecordOffset))
				.create(dcu);
	}

	/** Computes the DIE offset {@code count} mock entries past the CU's current end. */
	protected long getForwardOffset(MockDWARFCompilationUnit dcu, int count) {
		return dcu.getStartOffset() + dcu.getMockEntryCount() + count;
	}

	/**
	 * Returns a DW_TAG_member DIECreator for a field typed by {@code dataType}.
	 * <p>
	 * FIX: the original assert tolerated a null {@code dataType}
	 * ({@code dataType == null || ...}) but then unconditionally called
	 * {@code dataType.getOffset()}, guaranteeing an NPE for the "allowed" null.
	 * The type is now asserted non-null up front for a clear failure message.
	 */
	protected DIECreator newMember(DebugInfoEntry parentStruct, String fieldName,
			DebugInfoEntry dataType, int offset) {
		assertNotNull("member data type must be specified", dataType);
		assertTrue(dataType.getCompilationUnit() == parentStruct.getCompilationUnit());
		return newMember(parentStruct, fieldName, dataType.getOffset(), offset);
	}

	/**
	 * Returns a DW_TAG_member DIECreator referencing the member type by raw DIE offset.
	 * An {@code offset} of -1 omits DW_AT_data_member_location.
	 */
	protected DIECreator newMember(DebugInfoEntry parentStruct, String fieldName,
			long memberDIEOffset, int offset) {
		DIECreator field = new DIECreator(DWARFTag.DW_TAG_member)
				.addString(DW_AT_name, fieldName)
				.addRef(DW_AT_type, memberDIEOffset)
				.setParent(parentStruct);
		if (offset != -1) {
			field.addInt(DW_AT_data_member_location, offset);
		}
		return field;
	}

	/**
	 * Returns a DW_TAG_inheritance DIECreator.
	 * FIX: same null-tolerant-assert-then-use inconsistency as
	 * {@link #newMember(DebugInfoEntry, String, DebugInfoEntry, int)} — the base type is
	 * now asserted non-null.
	 */
	protected DIECreator newInherit(DebugInfoEntry parentStruct, DebugInfoEntry dataType,
			int offset) {
		assertNotNull("inherited data type must be specified", dataType);
		assertTrue(dataType.getCompilationUnit() == parentStruct.getCompilationUnit());
		DIECreator field = new DIECreator(DW_TAG_inheritance)
				.addRef(DW_AT_type, dataType)
				.addInt(DW_AT_data_member_location, offset)
				.setParent(parentStruct);
		return field;
	}

	/**
	 * Creates an array-type DIE with one subrange child per dimension.
	 * A dimension of -1 combined with {@code elideEmptyDimRangeValue} produces a
	 * subrange with no DW_AT_upper_bound (an unsized dimension).
	 */
	protected DebugInfoEntry newArray(MockDWARFCompilationUnit dcu, DebugInfoEntry baseTypeDIE,
			boolean elideEmptyDimRangeValue, int... dimensions) {
		DebugInfoEntry arrayType = new DIECreator(DW_TAG_array_type)
				.addRef(DW_AT_type, baseTypeDIE)
				.create(dcu);
		for (int dimIndex = 0; dimIndex < dimensions.length; dimIndex++) {
			int dim = dimensions[dimIndex];
			DIECreator dimDIE = new DIECreator(DW_TAG_subrange_type).setParent(arrayType);
			if (dim != -1 || !elideEmptyDimRangeValue) {
				dimDIE.addInt(DW_AT_upper_bound, dimensions[dimIndex]);
			}
			dimDIE.create(dcu);
		}
		return arrayType;
	}

	/** Creates a one-dimensional array-type DIE sized via DW_AT_count instead of upper bound. */
	protected DebugInfoEntry newArrayUsingCount(MockDWARFCompilationUnit dcu,
			DebugInfoEntry baseTypeDIE, int count) {
		DebugInfoEntry arrayType = new DIECreator(DW_TAG_array_type)
				.addRef(DW_AT_type, baseTypeDIE)
				.create(dcu);
		DIECreator dimDIE = new DIECreator(DW_TAG_subrange_type).setParent(arrayType);
		dimDIE.addInt(DW_AT_count, count);
		dimDIE.create(dcu);
		return arrayType;
	}

	/** Returns a subprogram DIECreator with name, return type and a [low_pc, low_pc+length) range. */
	protected DIECreator newSubprogram(String name, DebugInfoEntry returnType, long startAddress,
			long length) {
		return new DIECreator(DW_TAG_subprogram)
				.addString(DW_AT_name, name)
				.addRef(DW_AT_type, returnType)
				.addUInt(DW_AT_low_pc, startAddress)
				.addUInt(DW_AT_high_pc, length);
	}

	/** Returns a formal-parameter DIECreator with a DWARF location expression block. */
	protected DIECreator newFormalParam(DebugInfoEntry subprogram, String paramName,
			DebugInfoEntry paramDataType, int... locationExpr) {
		DIECreator param = new DIECreator(DW_TAG_formal_parameter)
				.addString(DW_AT_name, paramName)
				.addRef(DW_AT_type, paramDataType)
				.addBlock(DW_AT_location, locationExpr)
				.setParent(subprogram);
		return param;
	}

	/** Converts a long to an Address in the program's default address space. */
	protected Address addr(long l) {
		return space.getAddress(l);
	}

	/** Asserts the struct's last component is a zero-length ("flex") array. */
	protected void assertHasFlexArray(Structure struct) {
		DataTypeComponent component = struct.getComponent(struct.getNumComponents() - 1);
		assertNotNull(component);
		assertEquals(0, component.getLength());
		DataType dt = component.getDataType();
		assertTrue(dt instanceof Array);
		Array a = (Array) dt;
		assertEquals(0, a.getNumElements());
	}

	/** Asserts the struct's last component (if any) is NOT a zero-length array. */
	protected void assertMissingFlexArray(Structure struct) {
		DataTypeComponent component = struct.getComponent(struct.getNumComponents() - 1);
		if (component == null) {
			return;
		}
		assertNotEquals(0, component.getLength());
	}
}
/* * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.http.converter.xml; import java.io.IOException; import java.io.InputStreamReader; import java.io.StringReader; import java.nio.charset.StandardCharsets; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.transform.Source; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXSource; import javax.xml.transform.stax.StAXSource; import javax.xml.transform.stream.StreamSource; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; import org.springframework.core.testfixture.xml.XmlContent; import org.springframework.http.MediaType; import org.springframework.http.MockHttpInputMessage; import org.springframework.http.MockHttpOutputMessage; import org.springframework.http.converter.HttpMessageNotReadableException; import org.springframework.util.FileCopyUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author Arjen Poutsma * 
@author Rossen Stoyanchev */ public class SourceHttpMessageConverterTests { private static final String BODY = "<root>Hello World</root>"; private SourceHttpMessageConverter<Source> converter; private String bodyExternal; @BeforeEach public void setup() throws IOException { converter = new SourceHttpMessageConverter<>(); Resource external = new ClassPathResource("external.txt", getClass()); bodyExternal = "<!DOCTYPE root SYSTEM \"https://192.168.28.42/1.jsp\" [" + " <!ELEMENT root ANY >\n" + " <!ENTITY ext SYSTEM \"" + external.getURI() + "\" >]><root>&ext;</root>"; } @Test public void canRead() { assertThat(converter.canRead(Source.class, new MediaType("application", "xml"))).isTrue(); assertThat(converter.canRead(Source.class, new MediaType("application", "soap+xml"))).isTrue(); } @Test public void canWrite() { assertThat(converter.canWrite(Source.class, new MediaType("application", "xml"))).isTrue(); assertThat(converter.canWrite(Source.class, new MediaType("application", "soap+xml"))).isTrue(); assertThat(converter.canWrite(Source.class, MediaType.ALL)).isTrue(); } @Test public void readDOMSource() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(BODY.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); DOMSource result = (DOMSource) converter.read(DOMSource.class, inputMessage); Document document = (Document) result.getNode(); assertThat(document.getDocumentElement().getLocalName()).as("Invalid result").isEqualTo("root"); } @Test public void readDOMSourceExternal() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(bodyExternal.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); converter.setSupportDtd(true); DOMSource result = (DOMSource) converter.read(DOMSource.class, inputMessage); Document document = (Document) result.getNode(); 
assertThat(document.getDocumentElement().getLocalName()).as("Invalid result").isEqualTo("root"); assertThat(document.getDocumentElement().getTextContent()).as("Invalid result").isNotEqualTo("Foo Bar"); } @Test public void readDomSourceWithXmlBomb() throws Exception { // https://en.wikipedia.org/wiki/Billion_laughs // https://msdn.microsoft.com/en-us/magazine/ee335713.aspx String content = "<?xml version=\"1.0\"?>\n" + "<!DOCTYPE lolz [\n" + " <!ENTITY lol \"lol\">\n" + " <!ELEMENT lolz (#PCDATA)>\n" + " <!ENTITY lol1 \"&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;\">\n" + " <!ENTITY lol2 \"&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;\">\n" + " <!ENTITY lol3 \"&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;\">\n" + " <!ENTITY lol4 \"&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;\">\n" + " <!ENTITY lol5 \"&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;\">\n" + " <!ENTITY lol6 \"&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;\">\n" + " <!ENTITY lol7 \"&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;\">\n" + " <!ENTITY lol8 \"&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;\">\n" + " <!ENTITY lol9 \"&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;\">\n" + "]>\n" + "<root>&lol9;</root>"; MockHttpInputMessage inputMessage = new MockHttpInputMessage(content.getBytes(StandardCharsets.UTF_8)); assertThatExceptionOfType(HttpMessageNotReadableException.class).isThrownBy(() -> this.converter.read(DOMSource.class, inputMessage)) .withMessageContaining("DOCTYPE"); } @Test public void readSAXSource() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(BODY.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); SAXSource result = (SAXSource) converter.read(SAXSource.class, inputMessage); InputSource inputSource = result.getInputSource(); String s = FileCopyUtils.copyToString(new 
InputStreamReader(inputSource.getByteStream())); assertThat(XmlContent.from(s)).isSimilarTo(BODY); } @Test public void readSAXSourceExternal() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(bodyExternal.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); converter.setSupportDtd(true); SAXSource result = (SAXSource) converter.read(SAXSource.class, inputMessage); InputSource inputSource = result.getInputSource(); XMLReader reader = result.getXMLReader(); reader.setContentHandler(new DefaultHandler() { @Override public void characters(char[] ch, int start, int length) { String s = new String(ch, start, length); assertThat(s).as("Invalid result").isNotEqualTo("Foo Bar"); } }); reader.parse(inputSource); } @Test public void readSAXSourceWithXmlBomb() throws Exception { // https://en.wikipedia.org/wiki/Billion_laughs // https://msdn.microsoft.com/en-us/magazine/ee335713.aspx String content = "<?xml version=\"1.0\"?>\n" + "<!DOCTYPE lolz [\n" + " <!ENTITY lol \"lol\">\n" + " <!ELEMENT lolz (#PCDATA)>\n" + " <!ENTITY lol1 \"&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;\">\n" + " <!ENTITY lol2 \"&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;\">\n" + " <!ENTITY lol3 \"&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;\">\n" + " <!ENTITY lol4 \"&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;\">\n" + " <!ENTITY lol5 \"&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;\">\n" + " <!ENTITY lol6 \"&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;\">\n" + " <!ENTITY lol7 \"&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;\">\n" + " <!ENTITY lol8 \"&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;\">\n" + " <!ENTITY lol9 \"&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;\">\n" + "]>\n" + "<root>&lol9;</root>"; MockHttpInputMessage inputMessage = new 
MockHttpInputMessage(content.getBytes(StandardCharsets.UTF_8)); SAXSource result = (SAXSource) this.converter.read(SAXSource.class, inputMessage); InputSource inputSource = result.getInputSource(); XMLReader reader = result.getXMLReader(); assertThatExceptionOfType(SAXException.class) .isThrownBy(() -> reader.parse(inputSource)).withMessageContaining("DOCTYPE"); } @Test public void readStAXSource() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(BODY.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); StAXSource result = (StAXSource) converter.read(StAXSource.class, inputMessage); XMLStreamReader streamReader = result.getXMLStreamReader(); assertThat(streamReader.hasNext()).isTrue(); streamReader.nextTag(); String s = streamReader.getLocalName(); assertThat(s).isEqualTo("root"); s = streamReader.getElementText(); assertThat(s).isEqualTo("Hello World"); streamReader.close(); } @Test public void readStAXSourceExternal() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(bodyExternal.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); converter.setSupportDtd(true); StAXSource result = (StAXSource) converter.read(StAXSource.class, inputMessage); XMLStreamReader streamReader = result.getXMLStreamReader(); assertThat(streamReader.hasNext()).isTrue(); streamReader.next(); streamReader.next(); String s = streamReader.getLocalName(); assertThat(s).isEqualTo("root"); try { s = streamReader.getElementText(); assertThat(s).isNotEqualTo("Foo Bar"); } catch (XMLStreamException ex) { // Some parsers raise a parse exception } streamReader.close(); } @Test public void readStAXSourceWithXmlBomb() throws Exception { // https://en.wikipedia.org/wiki/Billion_laughs // https://msdn.microsoft.com/en-us/magazine/ee335713.aspx String content = "<?xml version=\"1.0\"?>\n" + "<!DOCTYPE lolz [\n" + " <!ENTITY lol \"lol\">\n" + " 
<!ELEMENT lolz (#PCDATA)>\n" + " <!ENTITY lol1 \"&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;\">\n" + " <!ENTITY lol2 \"&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;\">\n" + " <!ENTITY lol3 \"&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;\">\n" + " <!ENTITY lol4 \"&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;\">\n" + " <!ENTITY lol5 \"&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;\">\n" + " <!ENTITY lol6 \"&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;\">\n" + " <!ENTITY lol7 \"&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;\">\n" + " <!ENTITY lol8 \"&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;\">\n" + " <!ENTITY lol9 \"&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;\">\n" + "]>\n" + "<root>&lol9;</root>"; MockHttpInputMessage inputMessage = new MockHttpInputMessage(content.getBytes(StandardCharsets.UTF_8)); StAXSource result = (StAXSource) this.converter.read(StAXSource.class, inputMessage); XMLStreamReader streamReader = result.getXMLStreamReader(); assertThat(streamReader.hasNext()).isTrue(); streamReader.next(); streamReader.next(); String s = streamReader.getLocalName(); assertThat(s).isEqualTo("root"); assertThatExceptionOfType(XMLStreamException.class) .isThrownBy(streamReader::getElementText).withMessageContaining("\"lol9\""); } @Test public void readStreamSource() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(BODY.getBytes(StandardCharsets.UTF_8)); inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); StreamSource result = (StreamSource) converter.read(StreamSource.class, inputMessage); String s = FileCopyUtils.copyToString(new InputStreamReader(result.getInputStream())); assertThat(XmlContent.of(s)).isSimilarTo(BODY); } @Test public void readSource() throws Exception { MockHttpInputMessage inputMessage = new MockHttpInputMessage(BODY.getBytes(StandardCharsets.UTF_8)); 
inputMessage.getHeaders().setContentType(MediaType.APPLICATION_XML); converter.read(Source.class, inputMessage); } @Test public void writeDOMSource() throws Exception { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); Document document = documentBuilderFactory.newDocumentBuilder().newDocument(); Element rootElement = document.createElement("root"); document.appendChild(rootElement); rootElement.setTextContent("Hello World"); DOMSource domSource = new DOMSource(document); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); converter.write(domSource, null, outputMessage); assertThat(XmlContent.of(outputMessage.getBodyAsString(StandardCharsets.UTF_8))) .isSimilarTo("<root>Hello World</root>"); assertThat(outputMessage.getHeaders().getContentType()) .as("Invalid content-type").isEqualTo(MediaType.APPLICATION_XML); assertThat(outputMessage.getHeaders().getContentLength()) .as("Invalid content-length").isEqualTo(outputMessage.getBodyAsBytes().length); } @Test public void writeSAXSource() throws Exception { String xml = "<root>Hello World</root>"; SAXSource saxSource = new SAXSource(new InputSource(new StringReader(xml))); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); converter.write(saxSource, null, outputMessage); assertThat(XmlContent.of(outputMessage.getBodyAsString(StandardCharsets.UTF_8))) .isSimilarTo("<root>Hello World</root>"); assertThat(outputMessage.getHeaders().getContentType()) .as("Invalid content-type").isEqualTo(MediaType.APPLICATION_XML); } @Test public void writeStreamSource() throws Exception { String xml = "<root>Hello World</root>"; StreamSource streamSource = new StreamSource(new StringReader(xml)); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); converter.write(streamSource, null, outputMessage); assertThat(XmlContent.of(outputMessage.getBodyAsString(StandardCharsets.UTF_8))) .isSimilarTo("<root>Hello 
World</root>"); assertThat(outputMessage.getHeaders().getContentType()) .as("Invalid content-type").isEqualTo(MediaType.APPLICATION_XML); } }
/* * Vec3f - small utilities for 3D vectors * * created: mpichler, 19970505 * * changed: kwagen, 19970709 * changed: mpichler, 19970724 * * $Id: Vec3f.java,v 1.8 1997/09/23 12:52:45 apesen Exp $ */ package iicm.utils3d; /** * Vec3f - small utilities for 3D vectors. * Copyright (c) 1997 IICM * * @author Michael Pichler * @version 1.0, changed: 5 May 97 */ public final class Vec3f { /** * 3D vector of values */ public float[] value_ = new float [3]; // thus compatible with native code calls // array indices public static final int X = 0; public static final int Y = 1; public static final int Z = 2; /** * constructor (0, 0, 0) */ public Vec3f () { // initialized by new statement } /** * constructor (x, y, z) */ public Vec3f (float x, float y, float z) { // value_ = { x, y, z }; // illegal syntax float[] v = value_; v[0] = x; v[1] = y; v[2] = z; } /** * constructor (a[0], a[1], a[2]). values are copied into internal array. */ public Vec3f (float[/*3*/] a) { float[] v = value_; v[0] = a[0]; v[1] = a[1]; v[2] = a[2]; } /** * assignment (x, y, z) */ public void assign (float x, float y, float z) { float[] v = value_; v[0] = x; v[1] = y; v[2] = z; } /** * assignment (a[0], a[1], a[2]). values are copied into internal array. 
*/ public void assign (float[/*3*/] a) { float[] v = value_; v[0] = a[0]; v[1] = a[1]; v[2] = a[2]; } /** * assignment (copying, v = b) */ public void assign (Vec3f b) { float[] v = value_; float[] a = b.value_; v[0] = a[0]; v[1] = a[1]; v[2] = a[2]; } /** * negate (v = -v) */ public void negate () { float[] v = value_; v[0] = - v[0]; v[1] = - v[1]; v[2] = - v[2]; } /** * increase (v += b) */ public void increase (Vec3f b) { float[] v = value_; float[] w = b.value_; v[0] += w[0]; v[1] += w[1]; v[2] += w[2]; } public void increase (float[] w) { float[] v = value_; v[0] += w[0]; v[1] += w[1]; v[2] += w[2]; } /** * decrease (v -= b) */ public void decrease (Vec3f b) { float[] v = value_; float[] w = b.value_; v[0] -= w[0]; v[1] -= w[1]; v[2] -= w[2]; } public void decrease (float[] w) { float[] v = value_; v[0] -= w[0]; v[1] -= w[1]; v[2] -= w[2]; } /** * sincrease (v += f * b). increase by a scaled vector */ public void sincrease (float f, Vec3f b) { float[] v = value_; float[] w = b.value_; v[0] += f * w[0]; v[1] += f * w[1]; v[2] += f * w[2]; } public void sincrease (float f, float[] w) { float[] v = value_; v[0] += f * w[0]; v[1] += f * w[1]; v[2] += f * w[2]; } /** * ray equation (a + t * b) */ public void rayat (float[] a, float t, float[] b) { float[] v = value_; v[0] = a[0] + t * b[0]; v[1] = a[1] + t * b[1]; v[2] = a[2] + t * b[2]; } /** * scale by a scalar */ public void scale (float f) { float[] v = value_; v[0] *= f; v[1] *= f; v[2] *= f; } public static void scale (float[/*3*/] v, float f) { v[0] *= f; v[1] *= f; v[2] *= f; } /** * dot product. &lt; a . b &gt; E.g. dot (a, a) is the square norm of a */ public static float dot (Vec3f a, Vec3f b) { float[] u = a.value_; float[] v = b.value_; return u[0] * v[0] + u[1] * v[1] + u[2] * v[2]; } public static float dot (float[] u, float[] v) { return u[0] * v[0] + u[1] * v[1] + u[2] * v[2]; } /** * assign cross product a X b to this vector. * do not call with "this" as either argument. 
*/ public void cross (Vec3f a, Vec3f b) { cross (a.value_, b.value_); } public void cross (float[] u, float[] v) { value_[0] = u[1] * v[2] - u[2] * v[1]; value_[1] = u[2] * v[0] - u[0] * v[2]; value_[2] = u[0] * v[1] - u[1] * v[0]; } /** * normalize. return old length */ public float normalize () { float length = (float) Math.sqrt (dot (this, this)); if (length > 0.0f) scale (1.0f / length); return length; } /** * find a vector that is orthogonal to non-zero vector v. * write result (normalized) into axis */ public static void getOrthogonalVector (float[] v, float[] axis) { // choose vector which comes close to (0, 1, 0) float x = v[0]; float y = v[1]; float z = v[2]; if (Math.abs (x) > Math.abs (z)) // (-y, x, 0) or (y, -x, 0) { if (x > 0.0f) { axis[0] = -y; axis[1] = x; axis[2] = 0; } else { axis[0] = y; axis[1] = -x; axis[2] = 0; } } else // (0, z, -y) or (0, -z, y) { if (z > 0.0f) { axis[0] = 0; axis[1] = z; axis[2] = -y; } else { axis[0] = 0; axis[1] = -z; axis[2] = y; } } float length = (float) Math.sqrt (dot (axis, axis)); if (length > 0.0f) scale (axis, 1.0f / length); else System.err.println ("getOrthogonalVector. 
internal error on vector " + print (v)); // might return (1, 0, 0) in this case } // getOrthogonalVector /** * get the angle which the xy plane must be rotatated about the * Y-axis (unchanged) such that the new normal vector (Z-axis) * points towards start * @return angle off rotation */ public static float getRotationAngle (Vec3f start) { Vec3f x_axis = new Vec3f (); Vec3f y_axis = new Vec3f (0.0f, 1.0f, 0.0f); // Y axis in local coordinatesystem Vec3f z_axis = new Vec3f (); x_axis.cross (y_axis, start); // finde new X axis normal to plan through Y axis and start z_axis.cross (x_axis, y_axis); x_axis.normalize (); z_axis.normalize (); Vec3f p = new Vec3f (x_axis.value_[2], y_axis.value_[2], z_axis.value_[2]); double alpha = Math.acos (p.value_[2]); if (p.value_[0] < 0) alpha = -alpha; return ((float) alpha); } // getRotationAngle static final float slerpN_epsilon = 0.00001f; /** * spherical linear interpolation of 3D vector. return a vector that * lies "at t between a1 and a2", i.e. a1 for t == 0, a2 for t == 1 * and an interpolation of a1 and a2 for values between 0 and 1. * a1offs and a2offs allow a1 and a2 to start at an offset. * result is written at aoffs into array a. 
*/ public static void slerpNorm (float[] a1, int a1offs, float[] a2, int a2offs, float t, float[] a, int aoffs) { float[] p = { a1 [a1offs], a1 [a1offs+1], a1 [a1offs+2] }; float[] q = { a2 [a2offs], a2 [a2offs+1], a2 [a2offs+2] }; double alpha, beta; double cosom = dot (p, q); if ((cosom + 1.0) > slerpN_epsilon) { if ((1.0 - cosom) > slerpN_epsilon) // normal case: slerp { double omega = Math.acos (cosom); double sinom = Math.sin (omega); alpha = Math.sin ((1.0 - t) * omega) / sinom; beta = Math.sin (t * omega) / sinom; } else // vectors (nearly) coincide: linear interpolation { alpha = 1.0 - t; beta = t; } } else // vectors (nearly) opposite: go over orthogonal vector in between { float[] v = new float[3]; getOrthogonalVector (p, v); if (t < 0.5) { alpha = Math.sin ((1 - 2*t)*Math.PI/2); beta = Math.sin (t*Math.PI); q = v; } else { alpha = Math.sin ((1 - t)*Math.PI); beta = Math.sin ((2*t - 1)*Math.PI/2); p = v; } } for (int i = 0; i < 3; i++) a[aoffs+i] = (float) (alpha*p[i] + beta*q[i]); } // slerpNorm /** * conversion to string: "(x, y, z)". E.g.: System.out.println (v); */ public String toString () { return print (value_); } /** * print a 3D float array (debugging tool) */ public static String print (float[/*3*/] v) { return "(" + v[0] + ", " + v[1] + ", " + v[2] + ")"; } } // Vec3f
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Collection;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;

/**
 * Test open and close of regions using zk.
 * Spins up a two-server mini cluster once for the whole class, creates a
 * multi-region table, and then drives region close/open through the master's
 * assignment manager, polling its handler trackers to observe completion.
 */
@Category(MediumTests.class)
public class TestZKBasedOpenCloseRegion {
  private static final Log LOG = LogFactory.getLog(TestZKBasedOpenCloseRegion.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final TableName TABLENAME = TableName.valueOf("TestZKBasedOpenCloseRegion");
  private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"),
    Bytes.toBytes("b"), Bytes.toBytes("c")};
  // number of regions created for TABLENAME; set once in beforeAllTests
  private static int countOfRegions;

  /**
   * Start a two-server mini cluster, create the test table with multiple
   * regions, seed each region with one row, and enable the assignment
   * manager's open/close handler trackers that the tests poll.
   */
  @BeforeClass public static void beforeAllTests() throws Exception {
    Configuration c = TEST_UTIL.getConfiguration();
    c.setBoolean("dfs.support.append", true);
    c.setInt("hbase.regionserver.info.port", 0);
    TEST_UTIL.startMiniCluster(2);
    TEST_UTIL.createTable(TABLENAME, FAMILIES);
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    countOfRegions = TEST_UTIL.createMultiRegions(t, getTestFamily());
    waitUntilAllRegionsAssigned();
    addToEachStartKey(countOfRegions);
    t.close();
    TEST_UTIL.getHBaseCluster().getMaster().assignmentManager.initializeHandlerTrackers();
  }

  /** Tear down the mini cluster started in beforeAllTests. */
  @AfterClass public static void afterAllTests() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Ensure at least two live region servers and a quiescent cluster
   * (all regions assigned, none in transition) before each test.
   */
  @Before public void setup() throws IOException {
    if (TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().size() < 2) {
      // Need at least two servers.
      LOG.info("Started new server=" +
        TEST_UTIL.getHBaseCluster().startRegionServer());
    }
    waitUntilAllRegionsAssigned();
    waitOnRIT();
  }

  /**
   * Test we reopen a region once closed.
   * Unassigns a region and polls the handler trackers until both the close
   * and the subsequent automatic re-open have been handled.
   * @throws Exception
   */
  @Test (timeout=300000) public void testReOpenRegion()
  throws Exception {
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " +
      cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer = TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(
      ProtobufUtil.getOnlineRegions(regionServer.getRSRpcServices()));
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    LOG.info("Unassign " + hri.getRegionNameAsString());
    cluster.getMaster().assignmentManager.unassign(hri);

    // wait for the close to be processed ...
    while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    // ... and then for the region to be opened again elsewhere
    while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    LOG.info("Done with testReOpenRegion");
  }

  /**
   * Pick any online region that is not hbase:meta.
   * @return first non-meta region found, or null if all are meta
   */
  private HRegionInfo getNonMetaRegion(final Collection<HRegionInfo> regions) {
    HRegionInfo hri = null;
    for (HRegionInfo i: regions) {
      LOG.info(i.getRegionNameAsString());
      if (!i.isMetaRegion()) {
        hri = i;
        break;
      }
    }
    return hri;
  }

  /**
   * This test shows how a region won't be able to be assigned to a RS
   * if it's already "processing" it.
   * @throws Exception
   */
  @Test
  public void testRSAlreadyProcessingRegion() throws Exception {
    LOG.info("starting testRSAlreadyProcessingRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();

    HRegionServer hr0 =
        cluster.getLiveRegionServerThreads().get(0).getRegionServer();
    HRegionServer hr1 =
        cluster.getLiveRegionServerThreads().get(1).getRegionServer();
    HRegionInfo hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(hr0.getRSRpcServices()));

    // fake that hr1 is processing the region
    hr1.getRegionsInTransitionInRS().putIfAbsent(hri.getEncodedNameAsBytes(), true);

    // now ask the master to move the region to hr1, will fail
    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
        Bytes.toBytes(hr1.getServerName().toString()));

    // make sure the region came back
    assertEquals(hr1.getOnlineRegion(hri.getEncodedNameAsBytes()), null);

    // remove the block and reset the boolean
    hr1.getRegionsInTransitionInRS().remove(hri.getEncodedNameAsBytes());

    // now try moving a region when there is no region in transition.
    hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(hr1.getRSRpcServices()));

    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
        Bytes.toBytes(hr0.getServerName().toString()));

    // poll until the open handler has run for the moved region
    while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    // make sure the region has moved from the original RS
    assertTrue(hr1.getOnlineRegion(hri.getEncodedNameAsBytes()) == null);

  }

  /**
   * Unassign a region and verify the close handler is eventually invoked.
   */
  @Test (timeout=300000) public void testCloseRegion()
  throws Exception {
    LOG.info("Running testCloseRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " + cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer = TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(
      ProtobufUtil.getOnlineRegions(regionServer.getRSRpcServices()));
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    cluster.getMaster().assignmentManager.unassign(hri);

    while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
      Threads.sleep(100);
    }
    LOG.info("Done with testCloseRegion");
  }

  /** Block until the master reports no regions in transition. */
  private void waitOnRIT() {
    // Close worked but we are going to open the region elsewhere.  Before going on, make sure
    // this completes.
    while (TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
        getRegionStates().isRegionsInTransition()) {
      LOG.info("Waiting on regions in transition: " +
        TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
          getRegionStates().getRegionsInTransition());
      Threads.sleep(10);
    }
  }

  /**
   * If region open fails with IOException in openRegion() while doing tableDescriptors.get()
   * the region should not add into regionsInTransitionInRS map
   * @throws Exception
   */
  @Test
  public void testRegionOpenFailsDueToIOException() throws Exception {
    HRegionInfo REGIONINFO = new HRegionInfo(TableName.valueOf("t"),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_START_ROW);
    HRegionServer regionServer = TEST_UTIL.getHBaseCluster().getRegionServer(0);
    TableDescriptors htd = Mockito.mock(TableDescriptors.class);
    // swap in a mock that throws on get(); restore the real one afterwards
    Object orizinalState = Whitebox.getInternalState(regionServer,"tableDescriptors");
    Whitebox.setInternalState(regionServer, "tableDescriptors", htd);
    Mockito.doThrow(new IOException()).when(htd).get((TableName) Mockito.any());
    try {
      ProtobufUtil.openRegion(regionServer.getRSRpcServices(),
        regionServer.getServerName(), REGIONINFO);
      fail("It should throw IOException ");
    } catch (IOException e) {
      // expected: the mocked tableDescriptors.get() throws, and openRegion propagates it
    }
    Whitebox.setInternalState(regionServer, "tableDescriptors", orizinalState);
    assertFalse("Region should not be in RIT",
        regionServer.getRegionsInTransitionInRS().containsKey(REGIONINFO.getEncodedNameAsBytes()));
  }

  /**
   * Scan hbase:meta until every one of the table's regions has a server
   * assigned (i.e. a non-empty server column).
   */
  private static void waitUntilAllRegionsAssigned()
  throws IOException {
    HTable meta = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME);
    while (true) {
      int rows = 0;
      Scan scan = new Scan();
      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
      ResultScanner s = meta.getScanner(scan);
      for (Result r = null; (r = s.next()) != null;) {
        byte [] b =
          r.getValue(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
        if (b == null || b.length <= 0) {
          break;
        }
        rows++;
      }
      s.close();
      // If I get to here and all rows have a Server, then all have been assigned.
      if (rows >= countOfRegions) {
        break;
      }
      LOG.info("Found=" + rows);
      Threads.sleep(1000);
    }
    meta.close();
  }

  /*
   * Add to each of the regions in hbase:meta a value.  Key is the startrow
   * of the region (except its 'aaa' for first region).  Actual value is the
   * row name.
   * @param expected
   * @return
   * @throws IOException
   */
  private static int addToEachStartKey(final int expected) throws IOException {
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    HTable meta = new HTable(TEST_UTIL.getConfiguration(),
        TableName.META_TABLE_NAME);
    int rows = 0;
    Scan scan = new Scan();
    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    ResultScanner s = meta.getScanner(scan);
    for (Result r = null; (r = s.next()) != null;) {
      HRegionInfo hri = HRegionInfo.getHRegionInfo(r);
      if (hri == null) break;
      if(!hri.getTable().equals(TABLENAME)) {
        continue;
      }
      // If start key, add 'aaa'.
      byte [] row = getStartKey(hri);
      Put p = new Put(row);
      p.setDurability(Durability.SKIP_WAL);
      p.add(getTestFamily(), getTestQualifier(), row);
      t.put(p);
      rows++;
    }
    s.close();
    Assert.assertEquals(expected, rows);
    t.close();
    meta.close();
    return rows;
  }

  /** Row key to use for a region: its start key, or 'aaa' for the first region. */
  private static byte [] getStartKey(final HRegionInfo hri) {
    return Bytes.equals(HConstants.EMPTY_START_ROW, hri.getStartKey())?
        Bytes.toBytes("aaa"): hri.getStartKey();
  }

  private static byte [] getTestFamily() {
    return FAMILIES[0];
  }

  private static byte [] getTestQualifier() {
    return getTestFamily();
  }

  /** Manual entry point: run just testCloseRegion against a fresh mini cluster. */
  public static void main(String args[]) throws Exception {
    TestZKBasedOpenCloseRegion.beforeAllTests();
    TestZKBasedOpenCloseRegion test = new TestZKBasedOpenCloseRegion();
    test.setup();
    test.testCloseRegion();
    TestZKBasedOpenCloseRegion.afterAllTests();
  }
}
/* * Copyright (C) 2007-2008 Esmertec AG. Copyright (C) 2007-2008 The Android Open * Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package info.guardianproject.otr.app.im.app; import info.guardianproject.otr.app.im.IChatSession; import info.guardianproject.otr.app.im.IChatSessionManager; import info.guardianproject.otr.app.im.IImConnection; import info.guardianproject.otr.app.im.R; import info.guardianproject.otr.app.im.app.ContactListFilterView.ContactListListener; import info.guardianproject.otr.app.im.plugin.BrandingResourceIDs; import info.guardianproject.otr.app.im.provider.Imps; import info.guardianproject.otr.app.im.service.ImServiceConstants; import info.guardianproject.util.LogCleaner; import java.util.Observable; import java.util.Observer; import android.app.Activity; import android.app.SearchManager; import android.content.ContentResolver; import android.content.ContentUris; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.os.Message; import android.os.RemoteException; import android.support.v4.widget.SearchViewCompat; import android.util.AttributeSet; import android.util.Log; import android.view.ContextMenu; import android.view.ContextMenu.ContextMenuInfo; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.MenuItem.OnMenuItemClickListener; import android.view.View; 
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.CursorAdapter;
import android.widget.ExpandableListView.ExpandableListContextMenuInfo;

import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;

/**
 * Activity showing the contact list for a single IM account. It toggles
 * between two content views: a filterable flat list ({@code mFilterView},
 * active when {@code mIsFiltering} is true) and the grouped contact list
 * ({@code mContactListView}). The account id arrives via the launching
 * Intent; the IM connection is resolved asynchronously once the service
 * is bound (see {@link #initAccount}).
 */
public class ContactListActivity extends ThemeableActivity implements View.OnCreateContextMenuListener,
        ContactListListener {

    // Context-menu item ids, offset from Menu.FIRST.
    private static final int MENU_START_CONVERSATION = Menu.FIRST;
    private static final int MENU_VIEW_PROFILE = Menu.FIRST + 1;
    private static final int MENU_BLOCK_CONTACT = Menu.FIRST + 2;
    private static final int MENU_DELETE_CONTACT = Menu.FIRST + 3;
    private static final int MENU_END_CONVERSATION = Menu.FIRST + 4;

    // Saved-instance-state key for whether the filter view was showing.
    private static final String FILTER_STATE_KEY = "Filtering";

    ImApp mApp;
    long mProviderId;
    long mAccountId;
    IImConnection mConn;
    ContactListView mContactListView;
    ContactListFilterView mFilterView;
    SimpleAlertHandler mHandler;
    ContextMenuHandler mContextMenuHandler;
    // True while the filter view is the active content view.
    boolean mIsFiltering = true;
    Imps.ProviderSettings.QueryMap mGlobalSettingMap;
    // Set in onDestroy; guards async callbacks from touching dead views.
    boolean mDestroyed;
    View mSearchView;

    @Override
    protected void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        LayoutInflater inflate = getLayoutInflater();
        mContactListView = (ContactListView) inflate.inflate(R.layout.contact_list_view, null);
        mFilterView = (ContactListFilterView) getLayoutInflater().inflate(
                R.layout.contact_list_filter_view, null);
        mFilterView.setListener(this);
        mFilterView.getListView().setOnCreateContextMenuListener(this);
        getSherlock().getActionBar().setHomeButtonEnabled(true);
        getSherlock().getActionBar().setDisplayHomeAsUpEnabled(true);
        Intent intent = getIntent();
        mAccountId = intent.getLongExtra(ImServiceConstants.EXTRA_INTENT_ACCOUNT_ID, -1);
        // Without an account id there is nothing to show.
        if (mAccountId == -1) {
            finish();
            return;
        }
        mApp = (ImApp)getApplication();
    }

    /**
     * Loads account/provider info for {@code mAccountId}, sets the title, and
     * asynchronously wires the IM connection into both list views once the
     * service is connected. Called from onResume.
     * NOTE(review): the Cursor {@code c} is never closed (the close calls are
     * commented out) — looks like a deliberate workaround; confirm.
     */
    private void initAccount () {
        ContentResolver cr = getContentResolver();
        Cursor c = cr.query(ContentUris.withAppendedId(Imps.Account.CONTENT_URI, mAccountId), null,
                null, null, null);
        if (c == null) {
            //finish();
            return;
        }
        if (!c.moveToFirst()) {
            // c.close();
            //finish();
            return;
        }
        mProviderId = c.getLong(c.getColumnIndexOrThrow(Imps.Account.PROVIDER));
        mHandler = new MyHandler(this);
        String username = c.getString(c.getColumnIndexOrThrow(Imps.Account.USERNAME));
        //c.close();
        // BrandingResources brandingRes = mApp.getBrandingResource(mProviderId);
        // setTitle(brandingRes.getString(BrandingResourceIDs.STRING_BUDDY_LIST_TITLE, username));
        setTitle(username);
        // getWindow().setFeatureDrawable(Window.FEATURE_LEFT_ICON,
        // brandingRes.getDrawable(BrandingResourceIDs.DRAWABLE_LOGO));
        mGlobalSettingMap = new Imps.ProviderSettings.QueryMap(getContentResolver(), true, null);
        mApp.callWhenServiceConnected(mHandler, new Runnable() {
            public void run() {
                if (!mDestroyed) {
                    mApp.dismissNotifications(mProviderId);
                    mConn = mApp.getConnection(mProviderId);
                    if (mConn == null) {
                        clearConnectionStatus();
                        try {
                            mConn = mApp.createConnection(mProviderId, mAccountId);
                        } catch (RemoteException e) {
                            Log.e(ImApp.LOG_TAG, "The connection cannot be created");
                            // finish();
                        }
                    }
                    // mFilterView.mPresenceView.setConnection(mConn);
                    mFilterView.setConnection(mConn);
                    mContactListView.setConnection(mConn);
                    mContactListView.setHideOfflineContacts(mGlobalSettingMap
                            .getHideOfflineContacts());
                }
            }
        });
        mContextMenuHandler = new ContextMenuHandler();
        mContactListView.getListView().setOnCreateContextMenuListener(this);
        // Re-apply the hide-offline preference whenever global settings change.
        mGlobalSettingMap.addObserver(new Observer() {
            public void update(Observable observed, Object updateData) {
                if (!mDestroyed) {
                    mContactListView.setHideOfflineContacts(mGlobalSettingMap
                            .getHideOfflineContacts());
                }
            }
        });
        showFilterView();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getSupportMenuInflater();
        inflater.inflate(R.menu.contact_list_menu, menu);
        // SearchViewCompat may return null on pre-Honeycomb devices.
        mSearchView = SearchViewCompat.newSearchView(this);
        if (mSearchView != null) {
            MenuItem item = menu.add("Search")
                    .setIcon(android.R.drawable.ic_menu_search)
                    .setActionView(mSearchView);
            item.setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS | MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW);
            SearchViewCompat.setOnQueryTextListener(mSearchView,
                    new SearchViewCompat.OnQueryTextListenerCompat() {
                        @Override
                        public boolean onQueryTextChange(String newText) {
                            mFilterView.doFilter(newText);
                            return true;
                        }

                        @Override
                        public boolean onQueryTextSubmit(String query) {
                            mFilterView.doFilter(query);
                            return true;
                        }
                    });
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        //TODO make sure this works
        case R.id.menu_invite_user:
            Intent i = new Intent(ContactListActivity.this, AddContactActivity.class);
            i.putExtra(ImServiceConstants.EXTRA_INTENT_PROVIDER_ID, mProviderId);
            i.putExtra(ImServiceConstants.EXTRA_INTENT_ACCOUNT_ID, mAccountId);
            i.putExtra(ImServiceConstants.EXTRA_INTENT_LIST_NAME,
                    mContactListView.getSelectedContactList());
            startActivity(i);
            return true;
        case android.R.id.home:
        case R.id.menu_view_accounts:
            startActivity(new Intent(getBaseContext(), ChooseAccountActivity.class));
            // finish();
            return true;
        case R.id.menu_settings:
            Intent sintent = new Intent(this, SettingActivity.class);
            startActivity(sintent);
            return true;
        case R.id.menu_view_groups:
            // Toggle between the filter view and the grouped list view.
            if (mIsFiltering)
                showContactListView();
            else
                showFilterView();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Builds an ACTION_EDIT intent for this account, categorized by the
     * provider's category so the right edit activity is resolved.
     */
    Intent getEditAccountIntent(boolean isSignedIn) {
        Uri uri = ContentUris.withAppendedId(Imps.Provider.CONTENT_URI, mProviderId);
        @SuppressWarnings("deprecation")
        Cursor cursor = managedQuery(uri, new String[] { Imps.Provider.CATEGORY }, null, null, null);
        cursor.moveToFirst();
        Intent intent = new Intent(Intent.ACTION_EDIT, ContentUris.withAppendedId(
                Imps.Account.CONTENT_URI, mAccountId));
        intent.addCategory(cursor.getString(0));
        // cursor.close();
        intent.putExtra("isSignedIn", isSignedIn);
        return intent;
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Remember which of the two content views was showing.
        outState.putBoolean(FILTER_STATE_KEY, mIsFiltering);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        boolean isFiltering = savedInstanceState.getBoolean(FILTER_STATE_KEY);
        if (isFiltering) {
            showFilterView();
        }
    }

    /**
     * Routes key events to whichever list view is active, falling back to the
     * default dispatch. BACK returns to the filter view; SEARCH toggles the
     * soft keyboard and starts a search; printable keys start filtering.
     * NOTE(review): the branches dispatch to mFilterView when !mIsFiltering
     * and to mContactListView when mIsFiltering — that looks inverted relative
     * to the field's meaning elsewhere; confirm intended behavior.
     */
    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
        int keyCode = event.getKeyCode();
        boolean handled = false;
        if (!mIsFiltering) {
            handled = mFilterView.dispatchKeyEvent(event);
            if (!handled && (KeyEvent.KEYCODE_BACK == keyCode)
                && (KeyEvent.ACTION_DOWN == event.getAction())) {
                showFilterView();
                handled = true;
            }
        } else {
            handled = mContactListView.dispatchKeyEvent(event);
            if (!handled && KeyEvent.KEYCODE_SEARCH == keyCode
                && (KeyEvent.ACTION_DOWN == event.getAction())) {
                InputMethodManager inputMgr = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
                inputMgr.toggleSoftInput(0, 0);
                if (!mIsFiltering)
                    showFilterView();
                onSearchRequested();
            } else if (!handled && isReadable(keyCode, event)
                       && (KeyEvent.ACTION_DOWN == event.getAction())) {
                if (!mIsFiltering)
                    showFilterView();
                handled = mFilterView.dispatchKeyEvent(event);
            }
        }
        if (!handled) {
            handled = super.dispatchKeyEvent(event);
        }
        return handled;
    }

    @Override
    protected void onNewIntent(Intent intent) {
        // The user has probably entered a URL into "Go"
        String action = intent.getAction();
        if (Intent.ACTION_SEARCH.equals(action)) {
            if (mIsFiltering) {
                String filterText = intent.getStringExtra(SearchManager.QUERY);
                mFilterView.doFilter(filterText);
            }
        }
    }

    /**
     * Returns true for keys that produce filter text: not a modifier, not a
     * system key, and not one of the navigation/enter keys listed below.
     */
    private static boolean isReadable(int keyCode, KeyEvent event) {
        if (KeyEvent.isModifierKey(keyCode) || event.isSystem()) {
            return false;
        }
        switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_CENTER:
        case KeyEvent.KEYCODE_DPAD_DOWN:
        case KeyEvent.KEYCODE_DPAD_LEFT:
        case KeyEvent.KEYCODE_DPAD_RIGHT:
        case KeyEvent.KEYCODE_DPAD_UP:
        case KeyEvent.KEYCODE_ENTER:
            return false;
        }
        return true;
    }

    /**
     * Makes the filterable contact list the content view, seeded with the
     * contacts-by URI for this provider/account (online-only if the global
     * hide-offline setting is on).
     */
    private void showFilterView() {
        // Settings map is created in initAccount; bail if not ready yet.
        if (mGlobalSettingMap == null)
            return;
        Uri uri = mGlobalSettingMap.getHideOfflineContacts() ? Imps.Contacts.CONTENT_URI_ONLINE_CONTACTS_BY
                : Imps.Contacts.CONTENT_URI_CONTACTS_BY;
        uri = ContentUris.withAppendedId(uri, mProviderId);
        uri = ContentUris.withAppendedId(uri, mAccountId);
        mFilterView.doFilter(uri, null);
        setContentView(mFilterView);
        mFilterView.requestFocus();
        mIsFiltering = true;
    }

    /** Makes the grouped contact list the content view. */
    void showContactListView() {
        setContentView(mContactListView);
        mContactListView.requestFocus();
        mContactListView.invalidate();
        mIsFiltering = false;
    }

    @Override
    protected void onPause() {
        super.onPause();
        mApp.unregisterForConnEvents(mHandler);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mApp = (ImApp)getApplication();
        mApp.startImServiceIfNeed();
        mApp.setAppTheme(this);
        initAccount ();
        mApp.registerForConnEvents(mHandler);
        mContactListView.setAutoRefreshContacts(true);
        // Get the intent, verify the action and get the query
        showFilterView();
        Intent intent = getIntent();
        if (intent.getAction() != null && Intent.ACTION_SEARCH.equals(intent.getAction())) {
            if (mIsFiltering) {
                String filterText = intent.getStringExtra(SearchManager.QUERY);
                mFilterView.doFilter(filterText);
            }
        }
    }

    @Override
    protected void onDestroy() {
        mDestroyed = true;
        // set connection to null to unregister listeners.
        mContactListView.setConnection(null);
        mFilterView.setConnection(null);
        if (mGlobalSettingMap != null) {
            mGlobalSettingMap.close();
        }
        super.onDestroy();
    }

    static void log(String msg) {
        Log.v(ImApp.LOG_TAG, "<ContactListActivity> " + msg);
    }

    /**
     * Builds the per-contact context menu. Remembers the clicked position in
     * {@code mContextMenuHandler.mPosition} so the click handler can re-fetch
     * the contact cursor later.
     * NOTE(review): {@code chatSelected} is never assigned true anywhere in
     * this method, so the end-conversation branch below appears unreachable —
     * verify against the original project history.
     */
    @Override
    public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) {
        boolean chatSelected = false;
        boolean contactSelected = false;
        Cursor contactCursor;
        if (mIsFiltering) {
            AdapterView.AdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;
            mContextMenuHandler.mPosition = info.position;
            contactSelected = true;
            contactCursor = mFilterView.getContactAtPosition(info.position);
        } else {
            // Grouped list uses packed (group/child) positions.
            ExpandableListContextMenuInfo info = (ExpandableListContextMenuInfo) menuInfo;
            mContextMenuHandler.mPosition = info.packedPosition;
            contactSelected = mContactListView.isContactAtPosition(info.packedPosition);
            contactCursor = mContactListView.getContactAtPosition(info.packedPosition);
        }
        boolean allowBlock = true;
        if (contactCursor != null) {
            //XXX HACK: Yahoo! doesn't allow to block a friend. We can only block a temporary contact.
            ProviderDef provider = mApp.getProvider(mProviderId);
            if (Imps.ProviderNames.YAHOO.equals(provider.mName)) {
                int type = contactCursor.getInt(contactCursor
                        .getColumnIndexOrThrow(Imps.Contacts.TYPE));
                allowBlock = (type == Imps.Contacts.TYPE_TEMPORARY);
            }
            int nickNameIndex = contactCursor.getColumnIndexOrThrow(Imps.Contacts.NICKNAME);
            menu.setHeaderTitle(contactCursor.getString(nickNameIndex));
        }
        // Menu labels come from the provider's branding resources.
        BrandingResources brandingRes = mApp.getBrandingResource(mProviderId);
        String menu_end_conversation = brandingRes
                .getString(BrandingResourceIDs.STRING_MENU_END_CHAT);
        String menu_view_profile = brandingRes
                .getString(BrandingResourceIDs.STRING_MENU_VIEW_PROFILE);
        String menu_block_contact = brandingRes
                .getString(BrandingResourceIDs.STRING_MENU_BLOCK_CONTACT);
        String menu_start_conversation = brandingRes
                .getString(BrandingResourceIDs.STRING_MENU_START_CHAT);
        String menu_delete_contact = brandingRes
                .getString(BrandingResourceIDs.STRING_MENU_DELETE_CONTACT);
        if (chatSelected) {
            menu.add(0, MENU_END_CONVERSATION, 0, menu_end_conversation)
                    .setOnMenuItemClickListener(mContextMenuHandler);
            menu.add(0, MENU_VIEW_PROFILE, 0, menu_view_profile)
                    .setIcon(R.drawable.ic_menu_my_profile)
                    .setOnMenuItemClickListener(mContextMenuHandler);
            if (allowBlock) {
                menu.add(0, MENU_BLOCK_CONTACT, 0, menu_block_contact)
                        .setOnMenuItemClickListener(mContextMenuHandler);
            }
        } else if (contactSelected) {
            menu.add(0, MENU_START_CONVERSATION, 0, menu_start_conversation)
                    .setOnMenuItemClickListener(mContextMenuHandler);
            menu.add(0, MENU_VIEW_PROFILE, 0, menu_view_profile)
                    .setIcon(R.drawable.ic_menu_view_profile)
                    .setOnMenuItemClickListener(mContextMenuHandler);
            if (allowBlock) {
                menu.add(0, MENU_BLOCK_CONTACT, 0, menu_block_contact)
                        .setOnMenuItemClickListener(mContextMenuHandler);
            }
            menu.add(0, MENU_DELETE_CONTACT, 0, menu_delete_contact)
                    .setIcon(android.R.drawable.ic_menu_delete)
                    .setOnMenuItemClickListener(mContextMenuHandler);
        }
        // contactCursor.close();
    }

    /** Marks this account offline in the account_status table. */
    void clearConnectionStatus() {
ContentResolver cr = getContentResolver(); ContentValues values = new ContentValues(3); values.put(Imps.AccountStatus.ACCOUNT, mAccountId); values.put(Imps.AccountStatus.PRESENCE_STATUS, Imps.Presence.OFFLINE); values.put(Imps.AccountStatus.CONNECTION_STATUS, Imps.ConnectionStatus.OFFLINE); // insert on the "account_status" uri actually replaces the existing value cr.insert(Imps.AccountStatus.CONTENT_URI, values); } final class ContextMenuHandler implements MenuItem.OnMenuItemClickListener, OnMenuItemClickListener { long mPosition; public boolean onMenuItemClick(MenuItem item) { Cursor c; if (mIsFiltering) { c = mFilterView.getContactAtPosition((int) mPosition); } else { c = mContactListView.getContactAtPosition(mPosition); } switch (item.getItemId()) { case MENU_START_CONVERSATION: mContactListView.startChat(c); break; case MENU_VIEW_PROFILE: mContactListView.viewContactPresence(c); break; case MENU_BLOCK_CONTACT: mContactListView.blockContact(c); break; case MENU_DELETE_CONTACT: mContactListView.removeContact(c); break; case MENU_END_CONVERSATION: mContactListView.endChat(c); break; default: return false; } if (mIsFiltering) { showContactListView(); } return true; } @Override public boolean onMenuItemClick(android.view.MenuItem item) { Cursor c; if (mIsFiltering) { c = mFilterView.getContactAtPosition((int) mPosition); } else { c = mContactListView.getContactAtPosition(mPosition); } switch (item.getItemId()) { case MENU_START_CONVERSATION: mContactListView.startChat(c); break; case MENU_VIEW_PROFILE: mContactListView.viewContactPresence(c); break; case MENU_BLOCK_CONTACT: mContactListView.blockContact(c); break; case MENU_DELETE_CONTACT: mContactListView.removeContact(c); break; case MENU_END_CONVERSATION: mContactListView.endChat(c); break; default: return false; } if (mIsFiltering) { showContactListView(); } return true; } } final class MyHandler extends SimpleAlertHandler { public MyHandler(Activity activity) { super(activity); } @Override public void 
handleMessage(Message msg) {
            if (msg.what == ImApp.EVENT_CONNECTION_DISCONNECTED) {
                if (Log.isLoggable(ImApp.LOG_TAG, Log.DEBUG)) {
                    log("Handle event connection disconnected.");
                }
                promptDisconnectedEvent(msg);
                // Provider id is packed by the sender as (arg1 << 32) | arg2.
                // NOTE(review): a negative arg2 would sign-extend and corrupt
                // the id — confirm the sender never sends negative arg2.
                long providerId = ((long) msg.arg1 << 32) | msg.arg2;
                if (providerId == mProviderId) {
                    if (Log.isLoggable(ImApp.LOG_TAG, Log.DEBUG)) {
                        log("Current connection disconnected, finish");
                    }
                    // Send the user to the account edit screen, signed out.
                    startActivity(getEditAccountIntent(false));
                    // finish();
                }
                return;
            }
            super.handleMessage(msg);
        }
    }

    public boolean onClose() {
        return false;
    }

    protected boolean isAlwaysExpanded() {
        return false;
    }

    // Projection over the provider table including active-account columns;
    // the indices below must stay in sync with the array order.
    private static final String[] PROVIDER_PROJECTION = {
            Imps.Provider._ID,
            Imps.Provider.NAME,
            Imps.Provider.FULLNAME,
            Imps.Provider.CATEGORY,
            Imps.Provider.ACTIVE_ACCOUNT_ID,
            Imps.Provider.ACTIVE_ACCOUNT_USERNAME,
            Imps.Provider.ACTIVE_ACCOUNT_PW,
            Imps.Provider.ACTIVE_ACCOUNT_LOCKED,
            Imps.Provider.ACTIVE_ACCOUNT_KEEP_SIGNED_IN,
            Imps.Provider.ACCOUNT_PRESENCE_STATUS,
            Imps.Provider.ACCOUNT_CONNECTION_STATUS, };

    static final int PROVIDER_ID_COLUMN = 0;
    static final int PROVIDER_NAME_COLUMN = 1;
    static final int PROVIDER_FULLNAME_COLUMN = 2;
    static final int PROVIDER_CATEGORY_COLUMN = 3;
    static final int ACTIVE_ACCOUNT_ID_COLUMN = 4;
    static final int ACTIVE_ACCOUNT_USERNAME_COLUMN = 5;
    static final int ACTIVE_ACCOUNT_PW_COLUMN = 6;
    static final int ACTIVE_ACCOUNT_LOCKED = 7;
    static final int ACTIVE_ACCOUNT_KEEP_SIGNED_IN = 8;
    static final int ACCOUNT_PRESENCE_STATUS = 9;
    static final int ACCOUNT_CONNECTION_STATUS = 10;

    /**
     * Opens (creating if needed) a chat session with the contact at the
     * cursor's current row, then launches the chat screen for it.
     */
    @Override
    public void startChat(Cursor c) {
        if (c != null) {
            long id = c.getLong(c.getColumnIndexOrThrow(Imps.Contacts._ID));
            String username = c.getString(c.getColumnIndexOrThrow(Imps.Contacts.USERNAME));
            try {
                IChatSessionManager manager = mConn.getChatSessionManager();
                IChatSession session = manager.getChatSession(username);
                if (session == null) {
                    manager.createChatSession(username);
                }
                Uri data = ContentUris.withAppendedId(Imps.Chats.CONTENT_URI, id);
                Intent i = new Intent(Intent.ACTION_VIEW, data);
                i.addCategory(ImApp.IMPS_CATEGORY);
                startActivity(i);
                // mScreen.finish();
                //mContactListView.setAutoRefreshContacts(false);
            } catch (RemoteException e) {
                mHandler.showServiceErrorAlert(e.getLocalizedMessage());
                LogCleaner.error(ImApp.LOG_TAG, "remote error",e);
            }
        }
    }

    // Intentionally a no-op in this activity.
    public void showProfile (Cursor c){}
}
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.executor; import azkaban.Constants; import azkaban.Constants.ConfigurationKeys; import azkaban.event.EventHandler; import azkaban.executor.selector.ExecutorComparator; import azkaban.executor.selector.ExecutorFilter; import azkaban.executor.selector.ExecutorSelector; import azkaban.flow.FlowUtils; import azkaban.metrics.CommonMetrics; import azkaban.project.Project; import azkaban.project.ProjectWhitelist; import azkaban.utils.FileIOUtils.LogData; import azkaban.utils.Pair; import azkaban.utils.Props; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import java.io.File; import java.io.IOException; import java.lang.Thread.State; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import javax.inject.Inject; import javax.inject.Singleton; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import 
org.joda.time.DateTime;

/**
 * Executor manager used to manage the client side job.
 *
 * @deprecated replaced by {@link ExecutionController}
 */
@Singleton
@Deprecated
public class ExecutorManager extends EventHandler implements
    ExecutorManagerAdapter {

  // 12 weeks
  private static final long DEFAULT_EXECUTION_LOGS_RETENTION_MS = 3 * 4 * 7 * 24 * 60 * 60 * 1000L;
  private static final Duration RECENTLY_FINISHED_LIFETIME = Duration.ofMinutes(10);
  private static final Logger logger = Logger.getLogger(ExecutorManager.class);
  private final RunningExecutions runningExecutions;
  private final Props azkProps;
  private final CommonMetrics commonMetrics;
  private final ExecutorLoader executorLoader;
  private final RunningExecutionsUpdaterThread updaterThread;
  private final ExecutorApiGateway apiGateway;
  private final int maxConcurrentRunsOneFlow;
  private final Map<Pair<String, String>, Integer> maxConcurrentRunsPerFlowMap;
  private final ExecutorManagerUpdaterStage updaterStage;
  private final ExecutionFinalizer executionFinalizer;
  private final ActiveExecutors activeExecutors;
  private final ExecutorService executorInfoRefresherService;
  QueuedExecutions queuedFlows;
  File cacheDir;
  private QueueProcessorThread queueProcessor;
  // The flow currently being dispatched (in neither queuedFlows nor runningExecutions).
  private volatile Pair<ExecutionReference, ExecutableFlow> runningCandidate = null;
  private List<String> filterList;
  private Map<String, Integer> comparatorWeightsMap;
  private long lastSuccessfulExecutorInfoRefresh;
  private Duration sleepAfterDispatchFailure = Duration.ofSeconds(1L);
  private boolean initialized = false;

  @Inject
  public ExecutorManager(final Props azkProps, final ExecutorLoader executorLoader,
      final CommonMetrics commonMetrics,
      final ExecutorApiGateway apiGateway,
      final RunningExecutions runningExecutions,
      final ActiveExecutors activeExecutors,
      final ExecutorManagerUpdaterStage updaterStage,
      final ExecutionFinalizer executionFinalizer,
      final RunningExecutionsUpdaterThread updaterThread) throws ExecutorManagerException {
    this.azkProps = azkProps;
    this.commonMetrics = commonMetrics;
    this.executorLoader = executorLoader;
    this.apiGateway = apiGateway;
    this.runningExecutions = runningExecutions;
    this.activeExecutors = activeExecutors;
    this.updaterStage = updaterStage;
    this.executionFinalizer = executionFinalizer;
    this.updaterThread = updaterThread;
    this.maxConcurrentRunsOneFlow = ExecutorUtils.getMaxConcurrentRunsOneFlow(azkProps);
    this.maxConcurrentRunsPerFlowMap = ExecutorUtils.getMaxConcurentRunsPerFlowMap(azkProps);
    this.executorInfoRefresherService = createExecutorInfoRefresherService();
  }

  /**
   * One-time setup: loads executors, running and queued executions, and
   * configures the queue processor. Idempotent via the initialized flag.
   */
  void initialize() throws ExecutorManagerException {
    if (this.initialized) {
      return;
    }
    this.initialized = true;
    this.setupExecutors();
    this.loadRunningExecutions();
    this.queuedFlows = new QueuedExecutions(
        this.azkProps.getLong(ConfigurationKeys.WEBSERVER_QUEUE_SIZE, 100000));
    this.loadQueuedFlows();
    this.cacheDir = new File(this.azkProps.getString("cache.directory", "cache"));
    // TODO extract QueueProcessor as a separate class, move all of this into it
    setupExecutotrComparatorWeightsMap();
    setupExecutorFilterList();
    this.queueProcessor = setupQueueProcessor();
  }

  @Override
  public void start() throws ExecutorManagerException {
    initialize();
    this.updaterThread.start();
    this.queueProcessor.start();
  }

  private QueueProcessorThread setupQueueProcessor() {
    return new QueueProcessorThread(
        this.azkProps.getBoolean(Constants.ConfigurationKeys.QUEUEPROCESSING_ENABLED, true),
        this.azkProps.getLong(Constants.ConfigurationKeys.ACTIVE_EXECUTOR_REFRESH_IN_MS, 50000),
        this.azkProps.getInt(
            Constants.ConfigurationKeys.ACTIVE_EXECUTOR_REFRESH_IN_NUM_FLOW, 5),
        this.azkProps.getInt(
            Constants.ConfigurationKeys.MAX_DISPATCHING_ERRORS_PERMITTED,
            this.activeExecutors.getAll().size()),
        this.sleepAfterDispatchFailure);
  }

  private void setupExecutotrComparatorWeightsMap() {
    // initialize comparator feature weights for executor selector from azkaban.properties
    final Map<String, String> compListStrings = this.azkProps
.getMapByPrefix(ConfigurationKeys.EXECUTOR_SELECTOR_COMPARATOR_PREFIX); if (compListStrings != null) { this.comparatorWeightsMap = new TreeMap<>(); for (final Map.Entry<String, String> entry : compListStrings.entrySet()) { this.comparatorWeightsMap.put(entry.getKey(), Integer.valueOf(entry.getValue())); } } } private void setupExecutorFilterList() { // initialize hard filters for executor selector from azkaban.properties final String filters = this.azkProps .getString(ConfigurationKeys.EXECUTOR_SELECTOR_FILTERS, ""); if (filters != null) { this.filterList = Arrays.asList(StringUtils.split(filters, ",")); } } private ExecutorService createExecutorInfoRefresherService() { return Executors.newFixedThreadPool(this.azkProps.getInt( ConfigurationKeys.EXECUTORINFO_REFRESH_MAX_THREADS, 5)); } /** * {@inheritDoc} * * @see azkaban.executor.ExecutorManagerAdapter#setupExecutors() */ @Override public void setupExecutors() throws ExecutorManagerException { checkMultiExecutorMode(); this.activeExecutors.setupExecutors(); } // TODO Enforced for now to ensure that users migrate to multi-executor mode acknowledgingly. // TODO Remove this once confident enough that all active users have already updated to some // version new enough to have this change - for example after 1 year has passed. // TODO Then also delete ConfigurationKeys.USE_MULTIPLE_EXECUTORS. @Deprecated private void checkMultiExecutorMode() { if (!this.azkProps.getBoolean(Constants.ConfigurationKeys.USE_MULTIPLE_EXECUTORS, false)) { throw new IllegalArgumentException( Constants.ConfigurationKeys.USE_MULTIPLE_EXECUTORS + " must be true. 
Single executor mode is not supported any more."); } } /** * Refresh Executor stats for all the actie executors in this executorManager */ private void refreshExecutors() { final List<Pair<Executor, Future<ExecutorInfo>>> futures = new ArrayList<>(); for (final Executor executor : this.activeExecutors.getAll()) { // execute each executorInfo refresh task to fetch final Future<ExecutorInfo> fetchExecutionInfo = this.executorInfoRefresherService.submit( () -> this.apiGateway.callForJsonType(executor.getHost(), executor.getPort(), "/serverStatistics", null, ExecutorInfo.class)); futures.add(new Pair<>(executor, fetchExecutionInfo)); } boolean wasSuccess = true; for (final Pair<Executor, Future<ExecutorInfo>> refreshPair : futures) { final Executor executor = refreshPair.getFirst(); executor.setExecutorInfo(null); // invalidate cached ExecutorInfo try { // max 5 secs final ExecutorInfo executorInfo = refreshPair.getSecond().get(5, TimeUnit.SECONDS); // executorInfo is null if the response was empty executor.setExecutorInfo(executorInfo); logger.info(String.format( "Successfully refreshed executor: %s with executor info : %s", executor, executorInfo)); } catch (final TimeoutException e) { wasSuccess = false; logger.error("Timed out while waiting for ExecutorInfo refresh" + executor, e); } catch (final Exception e) { wasSuccess = false; logger.error("Failed to update ExecutorInfo for executor : " + executor, e); } // update is successful for all executors if (wasSuccess) { this.lastSuccessfulExecutorInfoRefresh = System.currentTimeMillis(); } } } /** * @see azkaban.executor.ExecutorManagerAdapter#disableQueueProcessorThread() */ @Override public void disableQueueProcessorThread() { this.queueProcessor.setActive(false); } /** * @see azkaban.executor.ExecutorManagerAdapter#enableQueueProcessorThread() */ @Override public void enableQueueProcessorThread() { this.queueProcessor.setActive(true); } public State getQueueProcessorThreadState() { return 
this.queueProcessor.getState();
  }

  /**
   * Returns whether the QueueProcessor is active: false means no flow is being
   * dispatched, true means flows are being dispatched as expected.
   */
  public boolean isQueueProcessorThreadActive() {
    return this.queueProcessor.isActive();
  }

  /**
   * Return last Successful ExecutorInfo Refresh for all active executors
   */
  public long getLastSuccessfulExecutorInfoRefresh() {
    return this.lastSuccessfulExecutorInfoRefresh;
  }

  /**
   * Get currently supported Comparators available to use via azkaban.properties
   */
  public Set<String> getAvailableExecutorComparatorNames() {
    return ExecutorComparator.getAvailableComparatorNames();
  }

  /**
   * Get currently supported filters available to use via azkaban.properties
   */
  public Set<String> getAvailableExecutorFilterNames() {
    return ExecutorFilter.getAvailableFilterNames();
  }

  @Override
  public State getExecutorManagerThreadState() {
    return this.updaterThread.getState();
  }

  public String getExecutorThreadStage() {
    return this.updaterStage.get();
  }

  @Override
  public boolean isExecutorManagerThreadActive() {
    return this.updaterThread.isAlive();
  }

  @Override
  public long getLastExecutorManagerThreadCheckTime() {
    return this.updaterThread.getLastThreadCheckTime();
  }

  @Override
  public Collection<Executor> getAllActiveExecutors() {
    return Collections.unmodifiableCollection(this.activeExecutors.getAll());
  }

  /**
   * {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#fetchExecutor(int)
   */
  @Override
  public Executor fetchExecutor(final int executorId) throws ExecutorManagerException {
    // Prefer the in-memory active set; fall back to the DB for inactive ones.
    for (final Executor executor : this.activeExecutors.getAll()) {
      if (executor.getId() == executorId) {
        return executor;
      }
    }
    return this.executorLoader.fetchExecutor(executorId);
  }

  @Override
  public Set<String> getPrimaryServerHosts() {
    // Only one for now. More probably later.
    final HashSet<String> ports = new HashSet<>();
    for (final Executor executor : this.activeExecutors.getAll()) {
      ports.add(executor.getHost() + ":" + executor.getPort());
    }
    return ports;
  }

  @Override
  public Set<String> getAllActiveExecutorServerHosts() {
    // Includes non primary server/hosts
    final HashSet<String> ports = new HashSet<>();
    for (final Executor executor : this.activeExecutors.getAll()) {
      ports.add(executor.getHost() + ":" + executor.getPort());
    }
    // include executor which were initially active and still has flows running
    for (final Pair<ExecutionReference, ExecutableFlow> running : this.runningExecutions.get()
        .values()) {
      final ExecutionReference ref = running.getFirst();
      if (ref.getExecutor().isPresent()) {
        final Executor executor = ref.getExecutor().get();
        ports.add(executor.getHost() + ":" + executor.getPort());
      }
    }
    return ports;
  }

  /** Populates runningExecutions from the DB's active flows at startup. */
  private void loadRunningExecutions() throws ExecutorManagerException {
    logger.info("Loading running flows from database..");
    final Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows = this.executorLoader
        .fetchActiveFlows();
    logger.info("Loaded " + activeFlows.size() + " running flows");
    this.runningExecutions.get().putAll(activeFlows);
  }

  /*
   * load queued flows i.e with active_execution_reference and not assigned to
   * any executor
   */
  private void loadQueuedFlows() throws ExecutorManagerException {
    final List<Pair<ExecutionReference, ExecutableFlow>> retrievedExecutions =
        this.executorLoader.fetchQueuedFlows();
    if (retrievedExecutions != null) {
      for (final Pair<ExecutionReference, ExecutableFlow> pair : retrievedExecutions) {
        this.queuedFlows.enqueue(pair.getSecond(), pair.getFirst());
      }
    }
  }

  /**
   * Gets a list of all the active (running flows and non-dispatched flows) executions for a given
   * project and flow {@inheritDoc}. Results should be sorted as we assume this while setting up
   * pipelined execution Id.
* * @see azkaban.executor.ExecutorManagerAdapter#getRunningFlows(int, java.lang.String) */ @Override public List<Integer> getRunningFlows(final int projectId, final String flowId) { final List<Integer> executionIds = new ArrayList<>(); executionIds.addAll(getRunningFlowsHelper(projectId, flowId, this.queuedFlows.getAllEntries())); // it's possible an execution is runningCandidate, meaning it's in dispatching state neither in queuedFlows nor runningFlows, // so checks the runningCandidate as well. if (this.runningCandidate != null) { executionIds .addAll( getRunningFlowsHelper(projectId, flowId, Lists.newArrayList(this.runningCandidate))); } executionIds.addAll(getRunningFlowsHelper(projectId, flowId, this.runningExecutions.get().values())); Collections.sort(executionIds); return executionIds; } /* Helper method for getRunningFlows */ private List<Integer> getRunningFlowsHelper(final int projectId, final String flowId, final Collection<Pair<ExecutionReference, ExecutableFlow>> collection) { final List<Integer> executionIds = new ArrayList<>(); for (final Pair<ExecutionReference, ExecutableFlow> ref : collection) { if (ref.getSecond().getFlowId().equals(flowId) && ref.getSecond().getProjectId() == projectId) { executionIds.add(ref.getFirst().getExecId()); } } return executionIds; } /** * {@inheritDoc} * * @see azkaban.executor.ExecutorManagerAdapter#getActiveFlowsWithExecutor() */ @Override public List<Pair<ExecutableFlow, Optional<Executor>>> getActiveFlowsWithExecutor() throws IOException { final List<Pair<ExecutableFlow, Optional<Executor>>> flows = new ArrayList<>(); getActiveFlowsWithExecutorHelper(flows, this.queuedFlows.getAllEntries()); getActiveFlowsWithExecutorHelper(flows, this.runningExecutions.get().values()); return flows; } /* Helper method for getActiveFlowsWithExecutor */ private void getActiveFlowsWithExecutorHelper( final List<Pair<ExecutableFlow, Optional<Executor>>> flows, final Collection<Pair<ExecutionReference, ExecutableFlow>> collection) { 
    for (final Pair<ExecutionReference, ExecutableFlow> ref : collection) {
      // pair each flow with the (possibly absent) executor it was assigned to
      flows.add(new Pair<>(ref.getSecond(), ref
          .getFirst().getExecutor()));
    }
  }

  /**
   * Checks whether the given flow has an active (running, non-dispatched) executions {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#isFlowRunning(int, java.lang.String)
   */
  @Override
  public boolean isFlowRunning(final int projectId, final String flowId) {
    boolean isRunning = false;
    // check both the not-yet-dispatched queue and the running set
    isRunning =
        isRunning
            || isFlowRunningHelper(projectId, flowId, this.queuedFlows.getAllEntries());
    isRunning =
        isRunning
            || isFlowRunningHelper(projectId, flowId, this.runningExecutions.get().values());
    return isRunning;
  }

  /* Search a running flow in a collection */
  private boolean isFlowRunningHelper(final int projectId, final String flowId,
      final Collection<Pair<ExecutionReference, ExecutableFlow>> collection) {
    for (final Pair<ExecutionReference, ExecutableFlow> ref : collection) {
      if (ref.getSecond().getProjectId() == projectId
          && ref.getSecond().getFlowId().equals(flowId)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Fetch ExecutableFlow from database {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#getExecutableFlow(int)
   */
  @Override
  public ExecutableFlow getExecutableFlow(final int execId)
      throws ExecutorManagerException {
    return this.executorLoader.fetchExecutableFlow(execId);
  }

  /**
   * Get all active (running, non-dispatched) flows
   *
   * {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#getRunningFlows()
   */
  @Override
  public List<ExecutableFlow> getRunningFlows() {
    final ArrayList<ExecutableFlow> flows = new ArrayList<>();
    getActiveFlowHelper(flows, this.queuedFlows.getAllEntries());
    getActiveFlowHelper(flows, this.runningExecutions.get().values());
    return flows;
  }

  /*
   * Helper method to get all running flows from a Pair<ExecutionReference,
   * ExecutableFlow collection
   */
  private void getActiveFlowHelper(final ArrayList<ExecutableFlow> flows,
      final Collection<Pair<ExecutionReference, ExecutableFlow>>
      collection) {
    for (final Pair<ExecutionReference, ExecutableFlow> ref : collection) {
      flows.add(ref.getSecond());
    }
  }

  /**
   * Get execution Ids of all running (unfinished) flows
   */
  public String getRunningFlowIds() {
    final List<Integer> allIds = new ArrayList<>();
    getRunningFlowsIdsHelper(allIds, this.queuedFlows.getAllEntries());
    getRunningFlowsIdsHelper(allIds, this.runningExecutions.get().values());
    Collections.sort(allIds);
    // rendered via List.toString, e.g. "[1, 2, 3]"
    return allIds.toString();
  }

  /**
   * Get execution Ids of all non-dispatched flows
   */
  public String getQueuedFlowIds() {
    final List<Integer> allIds = new ArrayList<>();
    getRunningFlowsIdsHelper(allIds, this.queuedFlows.getAllEntries());
    Collections.sort(allIds);
    return allIds.toString();
  }

  /**
   * Get the number of non-dispatched flows. {@inheritDoc}
   */
  @Override
  public long getQueuedFlowSize() {
    return this.queuedFlows.size();
  }

  /* Helper method to flow ids of all running flows */
  private void getRunningFlowsIdsHelper(final List<Integer> allIds,
      final Collection<Pair<ExecutionReference, ExecutableFlow>> collection) {
    for (final Pair<ExecutionReference, ExecutableFlow> ref : collection) {
      allIds.add(ref.getSecond().getExecutionId());
    }
  }

  @Override
  public List<ExecutableFlow> getRecentlyFinishedFlows() {
    List<ExecutableFlow> flows = new ArrayList<>();
    try {
      flows = this.executorLoader.fetchRecentlyFinishedFlows(
          RECENTLY_FINISHED_LIFETIME);
    } catch (final ExecutorManagerException e) {
      //Todo jamiesjc: fix error handling.
      // best-effort: on failure log and fall through to returning an empty list
      logger.error("Failed to fetch recently finished flows.", e);
    }
    return flows;
  }

  @Override
  public List<ExecutableFlow> getExecutableFlows(final int skip, final int size)
      throws ExecutorManagerException {
    final List<ExecutableFlow> flows = this.executorLoader.fetchFlowHistory(skip, size);
    return flows;
  }

  @Override
  public List<ExecutableFlow> getExecutableFlows(final String flowIdContains,
      final int skip, final int size) throws ExecutorManagerException {
    // wraps the search term in '%' wildcards for a "contains" match
    final List<ExecutableFlow> flows =
        this.executorLoader.fetchFlowHistory(null, '%' + flowIdContains + '%', null,
            0, -1, -1, skip, size);
    return flows;
  }

  @Override
  public List<ExecutableFlow> getExecutableFlows(final String projContain,
      final String flowContain, final String userContain, final int status,
      final long begin, final long end, final int skip, final int size)
      throws ExecutorManagerException {
    final List<ExecutableFlow> flows =
        this.executorLoader.fetchFlowHistory(projContain, flowContain, userContain,
            status, begin, end, skip, size);
    return flows;
  }

  @Override
  public List<ExecutableJobInfo> getExecutableJobs(final Project project,
      final String jobId, final int skip, final int size)
      throws ExecutorManagerException {
    final List<ExecutableJobInfo> nodes =
        this.executorLoader.fetchJobHistory(project.getId(), jobId, skip, size);
    return nodes;
  }

  @Override
  public int getNumberOfJobExecutions(final Project project, final String jobId)
      throws ExecutorManagerException {
    return this.executorLoader.fetchNumExecutableNodes(project.getId(), jobId);
  }

  @Override
  public LogData getExecutableFlowLog(final ExecutableFlow exFlow, final int offset,
      final int length) throws ExecutorManagerException {
    final Pair<ExecutionReference, ExecutableFlow> pair =
        this.runningExecutions.get().get(exFlow.getExecutionId());
    if (pair != null) {
      // still running: fetch the log window live from the assigned executor
      final Pair<String, String> typeParam = new Pair<>("type", "flow");
      final Pair<String, String> offsetParam =
          new Pair<>("offset", String.valueOf(offset));
      final Pair<String, String> lengthParam = new
          Pair<>("length", String.valueOf(length));
      @SuppressWarnings("unchecked")
      final Map<String, Object> result =
          this.apiGateway.callWithReference(pair.getFirst(), ConnectorParams.LOG_ACTION,
              typeParam, offsetParam, lengthParam);
      return LogData.createLogDataFromObject(result);
    } else {
      // execution finished: serve the persisted logs from the database
      final LogData value =
          this.executorLoader.fetchLogs(exFlow.getExecutionId(), "", 0, offset, length);
      return value;
    }
  }

  @Override
  public LogData getExecutionJobLog(final ExecutableFlow exFlow, final String jobId,
      final int offset, final int length, final int attempt)
      throws ExecutorManagerException {
    // Same live-vs-persisted split as getExecutableFlowLog, scoped to one job attempt.
    final Pair<ExecutionReference, ExecutableFlow> pair =
        this.runningExecutions.get().get(exFlow.getExecutionId());
    if (pair != null) {
      final Pair<String, String> typeParam = new Pair<>("type", "job");
      final Pair<String, String> jobIdParam = new Pair<>("jobId", jobId);
      final Pair<String, String> offsetParam =
          new Pair<>("offset", String.valueOf(offset));
      final Pair<String, String> lengthParam =
          new Pair<>("length", String.valueOf(length));
      final Pair<String, String> attemptParam =
          new Pair<>("attempt", String.valueOf(attempt));
      @SuppressWarnings("unchecked")
      final Map<String, Object> result =
          this.apiGateway.callWithReference(pair.getFirst(), ConnectorParams.LOG_ACTION,
              typeParam, jobIdParam, offsetParam, lengthParam, attemptParam);
      return LogData.createLogDataFromObject(result);
    } else {
      final LogData value =
          this.executorLoader.fetchLogs(exFlow.getExecutionId(), jobId, attempt,
              offset, length);
      return value;
    }
  }

  @Override
  public List<Object> getExecutionJobStats(final ExecutableFlow exFlow, final String jobId,
      final int attempt) throws ExecutorManagerException {
    final Pair<ExecutionReference, ExecutableFlow> pair =
        this.runningExecutions.get().get(exFlow.getExecutionId());
    if (pair == null) {
      // execution no longer running: read persisted attachments
      return this.executorLoader.fetchAttachments(exFlow.getExecutionId(), jobId,
          attempt);
    }

    final Pair<String, String> jobIdParam = new Pair<>("jobId", jobId);
    final Pair<String, String> attemptParam = new
        Pair<>("attempt", String.valueOf(attempt));
    @SuppressWarnings("unchecked")
    final Map<String, Object> result =
        this.apiGateway.callWithReference(pair.getFirst(),
            ConnectorParams.ATTACHMENTS_ACTION, jobIdParam, attemptParam);
    @SuppressWarnings("unchecked")
    final List<Object> jobStats = (List<Object>) result
        .get("attachments");
    return jobStats;
  }

  /**
   * If the Resource Manager and Job History server urls are configured, find all the
   * Hadoop/Spark application ids present in the Azkaban job's log and then construct the url to
   * job logs in the Hadoop/Spark server for each application id found. Application ids are
   * returned in the order they appear in the Azkaban job log.
   *
   * @param exFlow The executable flow.
   * @param jobId The job id.
   * @param attempt The job execution attempt.
   * @return The map of (application id, job log url)
   */
  @Override
  public Map<String, String> getExternalJobLogUrls(final ExecutableFlow exFlow,
      final String jobId, final int attempt) {
    // LinkedHashMap keeps app ids in log-discovery order
    final Map<String, String> jobLogUrlsByAppId = new LinkedHashMap<>();
    // all three URL templates must be configured, otherwise return an empty map
    if (!this.azkProps.containsKey(ConfigurationKeys.RESOURCE_MANAGER_JOB_URL)
        || !this.azkProps.containsKey(ConfigurationKeys.HISTORY_SERVER_JOB_URL)
        || !this.azkProps.containsKey(ConfigurationKeys.SPARK_HISTORY_SERVER_JOB_URL)) {
      return jobLogUrlsByAppId;
    }
    final Set<String> applicationIds = getApplicationIds(exFlow, jobId, attempt);
    for (final String applicationId : applicationIds) {
      final String jobLogUrl = ExecutionControllerUtils
          .createJobLinkUrl(exFlow, jobId, applicationId, this.azkProps);
      if (jobLogUrl != null) {
        jobLogUrlsByAppId.put(applicationId, jobLogUrl);
      }
    }
    return jobLogUrlsByAppId;
  }

  /**
   * Find all the Hadoop/Spark application ids present in the Azkaban job log. When iterating
   * over the set returned by this method the application ids are in the same order they appear
   * in the log.
   *
   * @param exFlow The executable flow.
   * @param jobId The job id.
   * @param attempt The job execution attempt.
   * @return The application ids found.
   */
  Set<String> getApplicationIds(final ExecutableFlow exFlow, final String jobId,
      final int attempt) {
    final Set<String> applicationIds = new LinkedHashSet<>();
    int offset = 0;
    try {
      // page through the job log in 50000-character chunks
      LogData data = getExecutionJobLog(exFlow, jobId, offset, 50000, attempt);
      while (data != null && data.getLength() > 0) {
        this.logger.info("Get application ID for execution " + exFlow.getExecutionId() + ", job"
            + " " + jobId + ", attempt " + attempt + ", data offset " + offset);
        String logData = data.getData();
        // cut the chunk back to its last whitespace delimiter; the next read starts
        // right after the kept portion — presumably so a token straddling the chunk
        // boundary is re-read whole in the next iteration (TODO confirm)
        final int indexOfLastSpace = logData.lastIndexOf(' ');
        final int indexOfLastTab = logData.lastIndexOf('\t');
        final int indexOfLastEoL = logData.lastIndexOf('\n');
        final int indexOfLastDelim = Math
            .max(indexOfLastEoL, Math.max(indexOfLastSpace, indexOfLastTab));
        if (indexOfLastDelim > -1) {
          // index + 1 to avoid looping forever if indexOfLastDelim is zero
          logData = logData.substring(0, indexOfLastDelim + 1);
        }
        applicationIds.addAll(ExecutionControllerUtils.findApplicationIdsFromLog(logData));
        offset = data.getOffset() + logData.length();
        data = getExecutionJobLog(exFlow, jobId, offset, 50000, attempt);
      }
    } catch (final ExecutorManagerException e) {
      // best-effort: return whatever ids were found before the failure
      this.logger.error("Failed to get application ID for execution " + exFlow.getExecutionId()
          + ", job " + jobId + ", attempt " + attempt + ", data offset " + offset, e);
    }
    return applicationIds;
  }

  /**
   * if flows was dispatched to an executor, cancel by calling Executor else if flow is still in
   * queue, remove from queue and finalize {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#cancelFlow(azkaban.executor.ExecutableFlow,
   * java.lang.String)
   */
  @Override
  public void cancelFlow(final ExecutableFlow exFlow, final String userId)
      throws ExecutorManagerException {
    // lock on the flow object to serialize concurrent control operations on it
    synchronized (exFlow) {
      if (this.runningExecutions.get().containsKey(exFlow.getExecutionId())) {
        final Pair<ExecutionReference, ExecutableFlow> pair =
            this.runningExecutions.get().get(exFlow.getExecutionId());
        this.apiGateway.callWithReferenceByUser(pair.getFirst(),
            ConnectorParams.CANCEL_ACTION, userId);
      } else if (this.queuedFlows.hasExecution(exFlow.getExecutionId())) {
        // never dispatched: remove from the queue and finalize locally
        this.queuedFlows.dequeue(exFlow.getExecutionId());
        this.executionFinalizer
            .finalizeFlow(exFlow, "Cancelled before dispatching to executor", null);
      } else {
        throw new ExecutorManagerException("Execution "
            + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
            + " isn't running.");
      }
    }
  }

  @Override
  public void resumeFlow(final ExecutableFlow exFlow, final String userId)
      throws ExecutorManagerException {
    synchronized (exFlow) {
      final Pair<ExecutionReference, ExecutableFlow> pair =
          this.runningExecutions.get().get(exFlow.getExecutionId());
      if (pair == null) {
        throw new ExecutorManagerException("Execution "
            + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
            + " isn't running.");
      }
      this.apiGateway
          .callWithReferenceByUser(pair.getFirst(), ConnectorParams.RESUME_ACTION, userId);
    }
  }

  @Override
  public void pauseFlow(final ExecutableFlow exFlow, final String userId)
      throws ExecutorManagerException {
    synchronized (exFlow) {
      final Pair<ExecutionReference, ExecutableFlow> pair =
          this.runningExecutions.get().get(exFlow.getExecutionId());
      if (pair == null) {
        throw new ExecutorManagerException("Execution "
            + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
            + " isn't running.");
      }
      this.apiGateway
          .callWithReferenceByUser(pair.getFirst(), ConnectorParams.PAUSE_ACTION, userId);
    }
  }

  @Override
  public void retryFailures(final ExecutableFlow exFlow, final String userId)
      throws ExecutorManagerException {
    modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_FAILURES, userId);
  }

  @SuppressWarnings("unchecked")
  private Map<String, Object> modifyExecutingJobs(final ExecutableFlow exFlow,
      final String command, final String userId, final String...
      jobIds) throws ExecutorManagerException {
    synchronized (exFlow) {
      final Pair<ExecutionReference, ExecutableFlow> pair =
          this.runningExecutions.get().get(exFlow.getExecutionId());
      if (pair == null) {
        throw new ExecutorManagerException("Execution "
            + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
            + " isn't running.");
      }

      final Map<String, Object> response;
      if (jobIds != null && jobIds.length > 0) {
        // validate every referenced job exists before issuing the command
        for (final String jobId : jobIds) {
          if (!jobId.isEmpty()) {
            final ExecutableNode node = exFlow.getExecutableNode(jobId);
            if (node == null) {
              throw new ExecutorManagerException("Job " + jobId
                  + " doesn't exist in execution " + exFlow.getExecutionId() + ".");
            }
          }
        }
        final String ids = StringUtils.join(jobIds, ',');
        response =
            this.apiGateway.callWithReferenceByUser(pair.getFirst(),
                ConnectorParams.MODIFY_EXECUTION_ACTION, userId,
                new Pair<>(ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command),
                new Pair<>(ConnectorParams.MODIFY_JOBS_LIST, ids));
      } else {
        // no job ids: apply the command to the execution as a whole
        response =
            this.apiGateway.callWithReferenceByUser(pair.getFirst(),
                ConnectorParams.MODIFY_EXECUTION_ACTION, userId,
                new Pair<>(ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command));
      }
      return response;
    }
  }

  @Override
  public String submitExecutableFlow(final ExecutableFlow exflow, final String userId)
      throws ExecutorManagerException {
    if (exflow.isLocked()) {
      // Skip execution for locked flows.
      final String message = String.format("Flow %s for project %s is locked.", exflow.getId(),
          exflow.getProjectName());
      logger.info(message);
      return message;
    }

    final String exFlowKey = exflow.getProjectName() + "." + exflow.getId() + ".submitFlow";
    // using project and flow name to prevent race condition when same flow is submitted by API and schedule at the same time
    // causing two same flow submission entering this piece.
    synchronized (exFlowKey.intern()) {
      final String flowId = exflow.getFlowId();
      logger.info("Submitting execution flow " + flowId + " by " + userId);

      String message = "";
      if (this.queuedFlows.isFull()) {
        // webserver-side queue is at capacity: reject the submission
        message =
            String.format(
                "Failed to submit %s for project %s. Azkaban has overrun its webserver queue capacity",
                flowId, exflow.getProjectName());
        logger.error(message);
        this.commonMetrics.markSubmitFlowFail();
      } else {
        final int projectId = exflow.getProjectId();
        exflow.setSubmitUser(userId);
        exflow.setStatus(Status.PREPARING);
        exflow.setSubmitTime(System.currentTimeMillis());

        // Get collection of running flows given a project and a specific flow name
        final List<Integer> running = getRunningFlows(projectId, flowId);

        ExecutionOptions options = exflow.getExecutionOptions();
        if (options == null) {
          options = new ExecutionOptions();
        }

        if (options.getDisabledJobs() != null) {
          FlowUtils.applyDisabledJobs(options.getDisabledJobs(), exflow);
        }

        if (!running.isEmpty()) {
          // another execution of the same flow is active: apply the concurrency policy
          final int maxConcurrentRuns = ExecutorUtils.getMaxConcurrentRunsForFlow(
              exflow.getProjectName(), flowId, this.maxConcurrentRunsOneFlow,
              this.maxConcurrentRunsPerFlowMap);
          if (running.size() > maxConcurrentRuns) {
            this.commonMetrics.markSubmitFlowSkip();
            throw new ExecutorManagerException("Flow " + flowId
                + " has more than " + maxConcurrentRuns
                + " concurrent runs. Skipping",
                ExecutorManagerException.Reason.SkippedExecution);
          } else if (options.getConcurrentOption().equals(
              ExecutionOptions.CONCURRENT_OPTION_PIPELINE)) {
            // pipeline: chain this run behind the newest running execution
            Collections.sort(running);
            final Integer runningExecId = running.get(running.size() - 1);
            options.setPipelineExecutionId(runningExecId);
            message =
                "Flow " + flowId + " is already running with exec id "
                    + runningExecId + ". Pipelining level "
                    + options.getPipelineLevel() + ". \n";
          } else if (options.getConcurrentOption().equals(
              ExecutionOptions.CONCURRENT_OPTION_SKIP)) {
            this.commonMetrics.markSubmitFlowSkip();
            throw new ExecutorManagerException("Flow " + flowId
                + " is already running. Skipping execution.",
                ExecutorManagerException.Reason.SkippedExecution);
          } else {
            // The settings is to run anyways.
            message =
                "Flow " + flowId + " is already running with exec id "
                    + StringUtils.join(running, ",")
                    + ". Will execute concurrently. \n";
          }
        }

        final boolean memoryCheck =
            !ProjectWhitelist.isProjectWhitelisted(exflow.getProjectId(),
                ProjectWhitelist.WhitelistType.MemoryCheck);
        options.setMemoryCheck(memoryCheck);

        // The exflow id is set by the loader. So it's unavailable until after
        // this call.
        this.executorLoader.uploadExecutableFlow(exflow);

        // We create an active flow reference in the datastore. If the upload
        // fails, we remove the reference.
        final ExecutionReference reference = new ExecutionReference(exflow.getExecutionId());
        this.executorLoader.addActiveExecutableReference(reference);
        this.queuedFlows.enqueue(exflow, reference);
        message += "Execution queued successfully with exec id "
            + exflow.getExecutionId();
        this.commonMetrics.markSubmitFlowSuccess();
      }
      return message;
    }
  }

  @Override
  public Map<String, String> doRampActions(List<Map<String, Object>> rampActions)
      throws ExecutorManagerException {
    return this.executorLoader.doRampActions(rampActions);
  }

  /**
   * Manage servlet call for stats servlet in Azkaban execution server {@inheritDoc}
   *
   * @see azkaban.executor.ExecutorManagerAdapter#callExecutorStats(int, java.lang.String,
   * azkaban.utils.Pair[])
   */
  @Override
  public Map<String, Object> callExecutorStats(final int executorId, final String action,
      final Pair<String, String>...
      params) throws IOException, ExecutorManagerException {
    final Executor executor = fetchExecutor(executorId);
    final List<Pair<String, String>> paramList = new ArrayList<>();

    // if params = null
    if (params != null) {
      paramList.addAll(Arrays.asList(params));
    }

    paramList.add(new Pair<>(ConnectorParams.ACTION_PARAM, action));

    return this.apiGateway.callForJsonObjectMap(executor.getHost(), executor.getPort(),
        "/stats", paramList);
  }

  @Override
  public Map<String, Object> callExecutorJMX(final String hostPort, final String action,
      final String mBean) throws IOException {
    final List<Pair<String, String>> paramList = new ArrayList<>();
    paramList.add(new Pair<>(action, ""));
    if (mBean != null) {
      paramList.add(new Pair<>(ConnectorParams.JMX_MBEAN, mBean));
    }
    // hostPort is "host:port"; split and forward to the executor's /jmx endpoint
    final String[] hostPortSplit = hostPort.split(":");
    return this.apiGateway.callForJsonObjectMap(hostPortSplit[0],
        Integer.valueOf(hostPortSplit[1]), "/jmx", paramList);
  }

  @Override
  public void shutdown() {
    // both threads are optional; guard against partially-initialized state
    if (null != this.queueProcessor) {
      this.queueProcessor.shutdown();
    }
    if (null != this.updaterThread) {
      this.updaterThread.shutdown();
    }
  }

  @Override
  public int getExecutableFlows(final int projectId, final String flowId, final int from,
      final int length, final List<ExecutableFlow> outputList)
      throws ExecutorManagerException {
    final List<ExecutableFlow> flows =
        this.executorLoader.fetchFlowHistory(projectId, flowId, from, length);
    outputList.addAll(flows);
    // returns the TOTAL count (not just the page size) for pagination
    return this.executorLoader.fetchNumExecutableFlows(projectId, flowId);
  }

  @Override
  public List<ExecutableFlow> getExecutableFlows(final int projectId, final String flowId,
      final int from, final int length, final Status status)
      throws ExecutorManagerException {
    return this.executorLoader.fetchFlowHistory(projectId, flowId, from, length, status);
  }

  /**
   * Calls executor to dispatch the flow, update db to assign the executor and in-memory state of
   * executableFlow.
   */
  private void dispatch(final ExecutionReference reference, final ExecutableFlow exflow,
      final Executor choosenExecutor) throws ExecutorManagerException {
    exflow.setUpdateTime(System.currentTimeMillis());

    // record the executor assignment in the db before contacting the executor
    this.executorLoader.assignExecutor(choosenExecutor.getId(),
        exflow.getExecutionId());
    try {
      this.apiGateway.callWithExecutable(exflow, choosenExecutor,
          ConnectorParams.EXECUTE_ACTION);
    } catch (final ExecutorManagerException ex) {
      // dispatch failed: undo the db assignment so another executor can be tried
      logger.error("Rolling back executor assignment for execution id:"
          + exflow.getExecutionId(), ex);
      this.executorLoader.unassignExecutor(exflow.getExecutionId());
      throw new ExecutorManagerException(ex);
    }
    reference.setExecutor(choosenExecutor);

    // move from flow to running flows
    this.runningExecutions.get().put(exflow.getExecutionId(), new Pair<>(reference, exflow));
    synchronized (this.runningExecutions.get()) {
      // Wake up RunningExecutionsUpdaterThread from wait() so that it will immediately check status
      // from executor(s). Normally flows will run at least some time and can't be cleaned up
      // immediately, so there will be another wait round (or many, actually), but for unit tests
      // this is significant to let them run quickly.
      this.runningExecutions.get().notifyAll();
    }
    synchronized (this) {
      // wake up all internal waiting threads, too
      this.notifyAll();
    }

    logger.info(String.format(
        "Successfully dispatched exec %d with error count %d",
        exflow.getExecutionId(), reference.getNumErrors()));
  }

  @VisibleForTesting
  void setSleepAfterDispatchFailure(final Duration sleepAfterDispatchFailure) {
    this.sleepAfterDispatchFailure = sleepAfterDispatchFailure;
  }

  /*
   * This thread is responsible for processing queued flows using dispatcher and
   * making rest api calls to executor server
   */
  private class QueueProcessorThread extends Thread {

    private static final long QUEUE_PROCESSOR_WAIT_IN_MS = 1000;
    private final int maxDispatchingErrors;
    private final long activeExecutorRefreshWindowInMillisec;
    private final int activeExecutorRefreshWindowInFlows;
    private final Duration sleepAfterDispatchFailure;
    // both flags are read from other threads, hence volatile
    private volatile boolean shutdown = false;
    private volatile boolean isActive = true;

    public QueueProcessorThread(final boolean isActive,
        final long activeExecutorRefreshWindowInTime,
        final int activeExecutorRefreshWindowInFlows,
        final int maxDispatchingErrors,
        final Duration sleepAfterDispatchFailure) {
      setActive(isActive);
      this.maxDispatchingErrors = maxDispatchingErrors;
      this.activeExecutorRefreshWindowInFlows = activeExecutorRefreshWindowInFlows;
      this.activeExecutorRefreshWindowInMillisec = activeExecutorRefreshWindowInTime;
      this.sleepAfterDispatchFailure = sleepAfterDispatchFailure;
      this.setName("AzkabanWebServer-QueueProcessor-Thread");
    }

    public boolean isActive() {
      return this.isActive;
    }

    public void setActive(final boolean isActive) {
      this.isActive = isActive;
      ExecutorManager.logger.info("QueueProcessorThread active turned " + this.isActive);
    }

    public void shutdown() {
      this.shutdown = true;
      // interrupt to break out of wait()/sleep() promptly
      this.interrupt();
    }

    @Override
    public void run() {
      // Loops till QueueProcessorThread is shutdown
      while (!this.shutdown) {
        synchronized (this) {
          try {
            // start processing queue if active, other wait for sometime
            if (this.isActive) {
              processQueuedFlows(this.activeExecutorRefreshWindowInMillisec,
                  this.activeExecutorRefreshWindowInFlows);
            }
            wait(QUEUE_PROCESSOR_WAIT_IN_MS);
          } catch (final Exception e) {
            ExecutorManager.logger.error(
                "QueueProcessorThread Interrupted. Probably to shut down.", e);
          }
        }
      }
    }

    /* Method responsible for processing the non-dispatched flows */
    private void processQueuedFlows(final long activeExecutorsRefreshWindow,
        final int maxContinuousFlowProcessed) throws InterruptedException,
        ExecutorManagerException {
      long lastExecutorRefreshTime = 0;
      int currentContinuousFlowProcessed = 0;

      while (isActive() && (ExecutorManager.this.runningCandidate = ExecutorManager.this.queuedFlows
          .fetchHead()) != null) {
        final ExecutionReference reference = ExecutorManager.this.runningCandidate.getFirst();
        final ExecutableFlow exflow = ExecutorManager.this.runningCandidate.getSecond();
        final long currentTime = System.currentTimeMillis();

        // if we have dispatched more than maxContinuousFlowProcessed or
        // It has been more then activeExecutorsRefreshWindow millisec since we
        // refreshed
        if (currentTime - lastExecutorRefreshTime > activeExecutorsRefreshWindow
            || currentContinuousFlowProcessed >= maxContinuousFlowProcessed) {
          // Refresh executorInfo for all activeExecutors
          refreshExecutors();
          lastExecutorRefreshTime = currentTime;
          currentContinuousFlowProcessed = 0;
        }

        /**
         * <pre>
         *  TODO: Work around till we improve Filters to have a notion of GlobalSystemState.
         *        Currently we try each queued flow once to infer a global busy state
         * Possible improvements:-
         *   1. Move system level filters in refreshExecutors and sleep if we have all executors busy after refresh
         *   2. Implement GlobalSystemState in selector or in a third place to manage system filters. Basically
         *      taking out all the filters which do not depend on the flow but are still being part of Selector.
         * Assumptions:-
         *   1. no one else except QueueProcessor is updating ExecutableFlow update time
         *   2.
         * re-attempting a flow (which has been tried before) is considered as all executors are busy
         * </pre>
         */
        if (exflow.getUpdateTime() > lastExecutorRefreshTime) {
          // flow was already tried since the last refresh: treat as "all busy"
          // put back in the queue
          ExecutorManager.this.queuedFlows.enqueue(exflow, reference);
          ExecutorManager.this.runningCandidate = null;
          final long sleepInterval =
              activeExecutorsRefreshWindow
                  - (currentTime - lastExecutorRefreshTime);
          // wait till next executor refresh
          Thread.sleep(sleepInterval);
        } else {
          exflow.setUpdateTime(currentTime);
          // process flow with current snapshot of activeExecutors
          selectExecutorAndDispatchFlow(reference, exflow);
          ExecutorManager.this.runningCandidate = null;
        }

        // do not count failed flow processsing (flows still in queue)
        if (ExecutorManager.this.queuedFlows.getFlow(exflow.getExecutionId()) == null) {
          currentContinuousFlowProcessed++;
        }
      }
    }

    /* process flow with a snapshot of available Executors */
    private void selectExecutorAndDispatchFlow(final ExecutionReference reference,
        final ExecutableFlow exflow) throws ExecutorManagerException {
      final Set<Executor> remainingExecutors = new HashSet<>(
          ExecutorManager.this.activeExecutors.getAll());
      Throwable lastError;
      synchronized (exflow) {
        do {
          final Executor selectedExecutor = selectExecutor(exflow, remainingExecutors);
          if (selectedExecutor == null) {
            ExecutorManager.this.commonMetrics.markDispatchFail();
            handleNoExecutorSelectedCase(reference, exflow);
            // RE-QUEUED - exit
            return;
          } else {
            try {
              dispatch(reference, exflow, selectedExecutor);
              ExecutorManager.this.commonMetrics.markDispatchSuccess();
              // SUCCESS - exit
              return;
            } catch (final ExecutorManagerException e) {
              lastError = e;
              logFailedDispatchAttempt(reference, exflow, selectedExecutor, e);
              ExecutorManager.this.commonMetrics.markDispatchFail();
              reference.setNumErrors(reference.getNumErrors() + 1);
              // FAILED ATTEMPT - try other executors except selectedExecutor
              updateRemainingExecutorsAndSleep(remainingExecutors, selectedExecutor);
            }
          }
        } while (reference.getNumErrors() <
            this.maxDispatchingErrors);
        // GAVE UP DISPATCHING
        final String message = "Failed to dispatch queued execution " + exflow.getId()
            + " because " + "reached " + ConfigurationKeys.MAX_DISPATCHING_ERRORS_PERMITTED
            + " (tried " + reference.getNumErrors() + " executors)";
        ExecutorManager.logger.error(message);
        ExecutorManager.this.executionFinalizer.finalizeFlow(exflow, message, lastError);
      }
    }

    /* Drop the failed executor from the candidate set; once every executor has
       failed, reset to the full active set and back off before retrying. */
    private void updateRemainingExecutorsAndSleep(final Set<Executor> remainingExecutors,
        final Executor selectedExecutor) {
      remainingExecutors.remove(selectedExecutor);
      if (remainingExecutors.isEmpty()) {
        remainingExecutors.addAll(ExecutorManager.this.activeExecutors.getAll());
        sleepAfterDispatchFailure();
      }
    }

    private void sleepAfterDispatchFailure() {
      try {
        Thread.sleep(this.sleepAfterDispatchFailure.toMillis());
      } catch (final InterruptedException e1) {
        ExecutorManager.logger.warn("Sleep after dispatch failure was interrupted - ignoring");
      }
    }

    private void logFailedDispatchAttempt(final ExecutionReference reference,
        final ExecutableFlow exflow, final Executor selectedExecutor,
        final ExecutorManagerException e) {
      ExecutorManager.logger.warn(String.format(
          "Executor %s responded with exception for exec: %d", selectedExecutor,
          exflow.getExecutionId()), e);
      ExecutorManager.logger.info(String.format(
          "Failed dispatch attempt for exec %d with error count %d",
          exflow.getExecutionId(), reference.getNumErrors()));
    }

    /* Helper method to fetch overriding Executor, if a valid user has specified
       one via the useExecutor flow parameter; otherwise return null */
    private Executor getUserSpecifiedExecutor(final ExecutionOptions options,
        final int executionId) {
      Executor executor = null;
      if (options != null
          && options.getFlowParameters() != null
          && options.getFlowParameters().containsKey(ExecutionOptions.USE_EXECUTOR)) {
        try {
          final int executorId =
              Integer.valueOf(options.getFlowParameters().get(ExecutionOptions.USE_EXECUTOR));
          executor = fetchExecutor(executorId);

          if (executor == null) {
            // not in the active set: fall back to a direct db lookup below
            ExecutorManager.logger
                .warn(String
                    .format(
                        "User specified executor id: %d for execution id: %d is not active, Looking up db.",
                        executorId, executionId));
            executor = ExecutorManager.this.executorLoader.fetchExecutor(executorId);
            if (executor == null) {
              ExecutorManager.logger
                  .warn(String
                      .format(
                          "User specified executor id: %d for execution id: %d is missing from db. Defaulting to availableExecutors",
                          executorId, executionId));
            }
          }
        } catch (final ExecutorManagerException ex) {
          ExecutorManager.logger.error("Failed to fetch user specified executor for exec_id = "
              + executionId, ex);
        }
      }
      return executor;
    }

    /* Choose Executor for exflow among the available executors */
    private Executor selectExecutor(final ExecutableFlow exflow,
        final Set<Executor> availableExecutors) {
      // an admin/user-specified executor (flow parameter) takes precedence
      Executor choosenExecutor =
          getUserSpecifiedExecutor(exflow.getExecutionOptions(), exflow.getExecutionId());

      // If no executor was specified by admin
      if (choosenExecutor == null) {
        ExecutorManager.logger.info("Using dispatcher for execution id :"
            + exflow.getExecutionId());
        final ExecutorSelector selector = new ExecutorSelector(ExecutorManager.this.filterList,
            ExecutorManager.this.comparatorWeightsMap);
        choosenExecutor = selector.getBest(availableExecutors, exflow);
      }
      return choosenExecutor;
    }

    private void handleNoExecutorSelectedCase(final ExecutionReference reference,
        final ExecutableFlow exflow) throws ExecutorManagerException {
      ExecutorManager.logger
          .info(String
              .format(
                  "Reached handleNoExecutorSelectedCase stage for exec %d with error count %d",
                  exflow.getExecutionId(), reference.getNumErrors()));
      // TODO: handle scenario where a high priority flow failing to get
      // schedule can starve all others
      ExecutorManager.this.queuedFlows.enqueue(exflow, reference);
    }
  }
}
package ar.util;

import java.awt.Color;
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;

import javax.imageio.ImageIO;

import ar.Aggregates;
import ar.Glyph;
import ar.Glyphset;
import ar.glyphsets.SimpleGlyph;
import ar.glyphsets.implicitgeometry.Indexed;
import ar.glyphsets.implicitgeometry.Shaper;
import ar.glyphsets.implicitgeometry.Valuer;
import ar.glyphsets.implicitgeometry.Indexed.Converter;
import ar.renderers.ThreadpoolRenderer;

/**Collection of various utilities that don't have other homes.**/
public class Util {
	/**Color representing clear (fully transparent).**/
	public static final Color CLEAR = new Color(0,0,0,0);

	/**Lookup a key/value pair in an argument list.
	 *
	 * @param args Argument list (flag followed by its value)
	 * @param flag Flag to search for (case-insensitive)
	 * @param def  Value returned when the flag is absent
	 * @return The argument following the flag, or def if not found
	 */
	public static String argKey(String[] args, String flag, String def) {
		flag = flag.toUpperCase();
		// Stop at length-1: a flag in the final position has no value after it
		// (previously this could throw ArrayIndexOutOfBoundsException).
		for (int i = 0; i < args.length - 1; i++) {
			if (args[i].toUpperCase().equals(flag)) {return args[i+1];}
		}
		return def;
	}

	///---------- TOOLS FOR PRETENDING THAT POINTS ARE SHAPES TOO!! ----------------------------

	/**Check for intersection between a rectangle and another (presumably) geometric object.**/
	public static boolean intersects(Rectangle2D r, Object o) {
		if (o instanceof Point2D) {return intersects(r, (Point2D) o);}
		if (o instanceof Shape) {return intersects(r, (Shape) o);}
		throw new IllegalArgumentException("Object passed must be either a shape or a point.");
	}

	/**Does the rectangle contain the point?**/
	public static boolean intersects(Rectangle2D r, Point2D p) {return r.contains(p);}

	/**Does the shape intersect the rectangle?**/
	public static boolean intersects(Rectangle2D r, Shape s) {return s.intersects(r);}

	/**Bounding box of a single geometric object (shape or point).**/
	public static Rectangle2D boundOne(Object o) {
		if (o instanceof Point2D) {return boundOne((Point2D) o);}
		if (o instanceof Shape) {return boundOne((Shape) o);}
		throw new IllegalArgumentException("Object passed must be either a shape or a point. Recieved: " + o.getClass().getName());
	}

	/**Bounding box of a shape.**/
	public static Rectangle2D boundOne(Shape s) {return s.getBounds2D();}

	/**Bounding box of a point (degenerate rectangle with minimal positive extent).**/
	public static Rectangle2D boundOne(Point2D p) {return new Rectangle2D.Double(p.getX(), p.getY(), Double.MIN_VALUE, Double.MIN_VALUE);}

	///------------------------------------------------------------------------------------------------

	/**Compute bounds of the glyphset.
	 *
	 * Uses Segment and bounds on segments.  The caller is responsible for avoiding recursive casees.
	 * **/
	public static <G,I> Rectangle2D bounds(Glyphset<G,I> glyphs) {
		int taskCount = ThreadpoolRenderer.RENDER_POOL_SIZE * ThreadpoolRenderer.RENDER_THREAD_LOAD;
		return glyphs.segment(taskCount).stream().parallel()
				.map(s -> s.bounds())
				.reduce((l,r) -> Util.bounds(l,r))
				.orElse(new Rectangle2D.Double(Double.NaN, Double.NaN, Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY));
	}

	/**What bounding box closely contains all of the glyphs in the passed collection.**/
	public static <G> Rectangle2D bounds(Iterable<? extends Glyph<G, ?>> glyphs) {return bounds(glyphs.iterator());}

	/**What bounding box closely contains all of the glyphs covered by the iterator.
	 * @return null when the iterator is empty; null glyphs are skipped.
	 */
	public static <G> Rectangle2D bounds(Iterator<? extends Glyph<G, ?>> glyphs) {
		Glyph<G,?> first = glyphs.hasNext() ? glyphs.next() : null;
		if (first == null) {return null;}
		Rectangle2D bounds = Util.boundOne(first.shape());
		while (glyphs.hasNext()) {
			Glyph<G, ?> g = glyphs.next();
			if (g == null) {continue;}
			G shape = g.shape();
			if (shape instanceof Point2D) {
				bounds.add((Point2D) shape);
			} else if (shape instanceof Shape) {
				Rectangle2D bound = ((Shape) shape).getBounds2D();
				if (bound != null) {add(bounds, bound);}
			}
		}
		return bounds;
	}

	/**What bounding box closely contains all of the glyphs passed.
	 * @return null when no non-null rectangle is supplied.
	 */
	public static Rectangle2D bounds(Rectangle2D... rs) {
		if (rs.length == 0) {return null;}

		// Seed with the first non-null rectangle (copied so callers' rects aren't mutated).
		Rectangle2D bounds = null;
		for (Rectangle2D r: rs) {
			if (r != null) {
				bounds = r.getBounds2D();
				break;
			}
		}
		if (bounds == null) {return null;}

		for (Rectangle2D r: rs) {
			if (r != null) {add(bounds, r);}
		}
		return bounds;
	}

	/**Mean of two values (overflow-safe midpoint).**/
	public static final int mean(int low, int high) {return low+((high-low)/2);}

	/**Mean of two values (overflow-safe midpoint).**/
	public static final long mean(long low, long high) {return low+((high-low)/2);}

	/**Load a set of glyphs from a delimited reader, using the provided shaper and valuer.
	 *
	 * This method creates concrete geometry, though it uses the implicit geometry system to achieve it.
	 *
	 * @param glyphs Glyphset to load items into
	 * @param reader Source of the glyph data
	 * @param converter Convert read entries to indexed entries
	 * @param shaper Convert the read item into a shape
	 * @param valuer Convert the read item into a value
	 * @return The glyphset passed in as a parameter (now with more glyphs)
	 */
	public static <G,I> Glyphset<G,I> load(
			Glyphset<G,I> glyphs, DelimitedReader reader,
			Indexed.Converter converter, Shaper<Indexed, G> shaper, Valuer<Indexed, I> valuer) {
		int count =0;

		// 'add' is discovered reflectively because it is not part of the Glyphset interface.
		Method m;
		try {m = glyphs.getClass().getMethod("add", Glyph.class);}
		catch (NoSuchMethodException | SecurityException e1) {throw new IllegalArgumentException("Cannot access 'add' on the passed glypshet.", e1);}
		m.setAccessible(true); //Suppress java access checking.  Allows access to (for example) public methods of private classes

		while (reader.hasNext()) {
			String[] parts = reader.next();
			if (parts == null) {continue;}

			Converter item = converter.applyTo(parts);
			I value = valuer.apply(item);
			G shape = shaper.apply(item);

			Glyph<G,I> g = new SimpleGlyph<G,I>(shape, value);
			try {m.invoke(glyphs, g);}
			catch (Exception e) {throw new RuntimeException("Error loading item number " + count, e);}
			count++;
		}
		//The check below causes an issue if memory is tight...the check has a non-trivial overhead on some glyphset types
		if (count != glyphs.size()) {throw new RuntimeException(String.format("Error loading data; Read and retained glyph counts don't match (%s read vs %s retained).", count, glyphs.size()));}
		return glyphs;
	}

	/**Sort a set of colors.**/
	public static final Comparator<Color> COLOR_SORTER = new Comparator<Color>() {
		public int compare(Color o1, Color o2) {
			// Integer.compare avoids subtraction overflow: getRGB() spans the full signed int range.
			return Integer.compare(o1.getRGB(), o2.getRGB());
		}
	};

	/**Create an indentation string of x*2 spaces.**/
	public static String indent(int x) {
		char[] chars = new char[x*2];
		Arrays.fill(chars,' ');
		return new String(chars);
	}

	/**Adds two rectangles together, updating the first so it is a bounds over the whole.
	 * Unlike Rectangle2D.add, this method treats NaN as if it were zero.
	 */
	public static void add(Rectangle2D target, Rectangle2D more) {
		double x = more.getX();
		double y = more.getY();
		double w = more.getWidth();
		double h = more.getHeight();

		x = Double.isNaN(x) ? 0 : x;
		y = Double.isNaN(y) ? 0 : y;
		w = Double.isNaN(w) ? 0 : w;
		h = Double.isNaN(h) ? 0 : h;

		target.add(new Rectangle2D.Double(x,y,w,h));
	}

	/**Linear interpolation between two colors.
	 *
	 * @param low Color to associate with the min value
	 * @param high Color to associate with the max value
	 * @param min Smallest value that will be passed
	 * @param max Largest value that will be passed
	 * @param v Current value
	 * **/
	public static Color interpolate(Color low, Color high, double min, double max, double v) {
		if (v>max) {v=max;}	//Clamp to the [min,max] range
		if (v<min) {v=min;}
		double distance = 1-((max-v)/(max-min));
		if (Double.isNaN(distance) || Double.isInfinite(distance)) {return high;} //min==max degenerates here
		int r = (int) weightedAverage(high.getRed(), low.getRed(), distance);
		int g = (int) weightedAverage(high.getGreen(), low.getGreen(), distance);
		int b = (int) weightedAverage(high.getBlue(), low.getBlue(), distance);
		int a = (int) weightedAverage(high.getAlpha(), low.getAlpha(), distance);
		return new java.awt.Color(r,g,b,a);
	}

	/**Weighted average between two values
	 *
	 * @param min The lowest value to expect
	 * @param max The highest value to expect
	 * @param p The desired percentage offset between max and min
	 * @return The resulting value
	 */
	public static double weightedAverage(double min, double max, double p) {
		return (min-max) * p + max;
	}

	/**Stats with defaults, nulls, NaNs and infinities all excluded from the count basis.**/
	public static <N extends Number> Stats<N> stats(Aggregates<? extends N> aggregates) {return stats(aggregates, true, true, true, true);}

	/**What is the min/max/mean/stdev in the collection of aggregates (assuming its over numbers).
	 *
	 * By default default values, NaNs, Nulls and infinity are fully skipped.
	 * However, if the relevant parameters set to false they will be included in the "count" basis and thus influence the mean.
	 * **/
	public static <N extends Number> Stats<N> stats(
			Aggregates<? extends N> aggregates,
			boolean ignoreDefault, boolean ignoreNulls, boolean ignoreNaNs, boolean ignoreInfinity) {
		//For a single-pass std. dev is based on: http://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods
		long count=0;
		long nullCount=0;
		long nanCount=0;
		long infCount=0;
		long defCount=0;
		N min = null;
		N max = null;
		double sum=0;

		for (N n: aggregates) {
			if (n == null) {nullCount++; continue;}
			double v = n.doubleValue();
			if (Double.isNaN(v)) {nanCount++; continue;}
			if (Double.isInfinite(v)) {infCount++; continue;}
			if (aggregates.defaultValue().equals(n)) {
				defCount++;
				continue;
			}
			if (min == null || min.doubleValue() > v) {min = n;}
			if (max == null || max.doubleValue() < v) {max = n;}
			sum += v;
			count++;
		}

		final long fullCount = count
				+ (ignoreDefault ? 0 : defCount)
				+ (ignoreNulls ? 0 : nullCount)
				+ (ignoreNaNs ? 0 : nanCount)
				+ (ignoreInfinity ? 0 : infCount);
		final double mean = sum/fullCount;
		double acc =0;

		// Second pass accumulates squared deviations.  It skips exactly the values
		// that were excluded from 'sum' above so mean and stdev share a basis.
		for (N n: aggregates) {
			if (n == null) {continue;}
			double v = n.doubleValue();
			if (Double.isNaN(v) || Double.isInfinite(v)) {continue;}
			if (aggregates.defaultValue().equals(n)) {continue;}
			acc += Math.pow((v-mean),2);	// += (was '=', which discarded all but the last term)
		}
		double stdev = Math.sqrt(acc/fullCount);
		return new Stats<>(min,max,mean,stdev,defCount, nullCount,nanCount);
	}

	/**Wrapper class for statistical values derived from a common source.**/
	@SuppressWarnings("javadoc")
	public static final class Stats<N extends Number> {
		public final N min;
		public final N max;
		public final double mean;
		public final double stdev;
		public final long nullCount;
		public final long nanCount;
		public final long defaultCount;

		public Stats(N min, N max, double mean, double stdev, long defCount, long nullCount, long nanCount) {
			this.min = min;
			this.max=max;
			this.mean=mean;
			this.stdev = stdev;
			this.defaultCount = defCount;
			this.nullCount = nullCount;
			this.nanCount = nanCount;
		}

		public String toString() {
			if (min instanceof Integer || min instanceof Long) {
				return String.format("Min: %d; Max: %d; Mean: %.3f; Stdev: %.3f", min,max,mean,stdev);
			} else {
				return String.format("Min: %.3f; Max: %.3f; Mean: %.3f; Stdev: %.3f", min,max,mean,stdev);
			}
		}
	}

	/**Null-safe .equals caller.**/
	public static <T> boolean isEqual(T one, T two) {
		return one == two || (one != null && one.equals(two));
	}

	/**Calculate the affine transform to fit a box of the given size/location onto a 0,0,width,height space.**/
	public static AffineTransform zoomFit(Rectangle2D content, int width, int height) {
		if (content == null) {return new AffineTransform();}

		double ws = width/content.getWidth();
		double hs = height/content.getHeight();
		double scale = Math.min(ws, hs);	//Uniform scale: preserves aspect ratio
		double xmargin = width/scale-content.getWidth();
		double ymargin = height/scale-content.getHeight();
		double tx = content.getMinX()-(xmargin/2);
		double ty = content.getMinY()-(ymargin/2);

		AffineTransform t = AffineTransform.getScaleInstance(scale,scale);
		t.translate(-tx,-ty);
		return t;
	}

	/**Write a buffered image to a file, stripping the alpha channel.**/
	public static void writeImage(final BufferedImage src, File f) {writeImage(src, f, true);}

	/**Write a buffered image to a file.
	 * @param removeAlpha Composite the image over white before writing (some machines had trouble with alpha)
	 */
	public static void writeImage(BufferedImage src, File f, boolean removeAlpha) {
		try {
			if (f.getParentFile() != null && !f.getParentFile().exists()) {
				f.getParentFile().mkdirs();
			}

			if (removeAlpha) {
				//Remove alpha component because it was causing problems on some machines.
				BufferedImage noAlpha = new BufferedImage(src.getWidth(), src.getHeight(), BufferedImage.TYPE_INT_RGB);
				Color bgColor = Color.white;
				for (int x=0; x<src.getWidth(); x++) {
					for (int y=0; y<src.getHeight();y++) {
						Color fgColor = new Color(src.getRGB(x,y), true);
						noAlpha.setRGB(x, y, premultiplyAlpha(fgColor, bgColor).getRGB());
					}
				}
				src = noAlpha;
			}

			if (!f.getName().toUpperCase().endsWith("PNG")) {
				// Keep the parent directory when appending the extension
				// (previously the rename dropped the path and wrote into the working directory).
				f = new File(f.getParentFile(), f.getName()+".png");
			}
			if (!ImageIO.write(src, "PNG", f)) {throw new RuntimeException("Could not find encoder for file:"+f.getName());}
		}catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**Composite a foreground color over an opaque background, discarding alpha.**/
	public static final Color premultiplyAlpha(Color fgColor, Color bgColor) {
		int r, g, b;
		int fgAlpha = fgColor.getAlpha();

		r = fgColor.getRed() * fgAlpha + bgColor.getRed() * (255 - fgAlpha);
		g = fgColor.getGreen() * fgAlpha + bgColor.getGreen() * (255 - fgAlpha);
		b = fgColor.getBlue() * fgAlpha + bgColor.getBlue() * (255 - fgAlpha);

		Color result = new Color(r / 255, g / 255, b / 255);
		return result;
	}

	/**Comparator to wrap the compareTo method of comparable items.**/
	public static class ComparableComparator<T extends Comparable<T>> implements Comparator<T>, Serializable {
		public int compare(T lhs, T rhs) {return lhs.compareTo(rhs);}
	}

	/**Insert a value into an array at the given index.**/
	public static final <T> T[] insertInto(T[] values, T value, int at) {
		@SuppressWarnings("unchecked")
		T[] newValues = (T[]) Array.newInstance(value.getClass(), values.length+1);
		System.arraycopy(values, 0, newValues, 0, at);
		newValues[at] = value;
		System.arraycopy(values, at, newValues, at+1, values.length-at);
		return newValues;
	}

	/**Insert a value into an array at the given index.**/
	public static final int[] insertInto(int[] values, int value, int at) {
		int[] newValues = new int[values.length+1];
		System.arraycopy(values, 0, newValues, 0, at);
		newValues[at] = value;
		System.arraycopy(values, at, newValues, at+1, values.length-at);
		return newValues;
	}

	/**Remove a the item at the given index from the array.**/
	public static final double[] removeFrom(double[] source, int idx) {
		double[] rslt = Arrays.copyOf(source, source.length-1);
		System.arraycopy(source, idx+1, rslt, idx, source.length-idx-1);
		return rslt;
	}

	/**Print the contents of an int[].**/
	public static String deepToString(int[] values) {
		if (values.length == 0) {return "[]";}	//Guard: the delete below underflows on an empty array
		StringBuilder b = new StringBuilder();
		b.append("[");
		for (int i=0; i<values.length;i++) {
			b.append(values[i]);
			b.append(", ");
		}
		b.delete(b.length()-2, b.length()-1);
		b.append("]");
		return b.toString();
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.carbondata.presto;

import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
import org.apache.carbondata.presto.impl.CarbonTableReader;

import com.facebook.presto.spi.*;
import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.type.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;

import javax.inject.Inject;
import java.util.*;

import static org.apache.carbondata.presto.Types.checkType;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;

/**
 * Presto {@link ConnectorMetadata} implementation backed by a {@link CarbonTableReader}.
 * Schema/table lookups are delegated to the reader; type mapping from CarbonData to
 * Presto SPI types is done in {@link #carbonDataType2SpiMapper(ColumnSchema)}.
 */
public class CarbondataMetadata implements ConnectorMetadata {

  private final String connectorId;
  private CarbonTableReader carbonTableReader;
  private ClassLoader classLoader;

  // Cache of the most recently built column handles (see getColumnHandles).
  private Map<String, ColumnHandle> columnHandleMap;

  @Inject public CarbondataMetadata(CarbondataConnectorId connectorId, CarbonTableReader reader) {
    this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
    this.carbonTableReader = requireNonNull(reader, "client is null");
  }

  /** Install the class loader used when calling into the CarbonData reader. */
  public void putClassLoader(ClassLoader classLoader) {
    this.classLoader = classLoader;
  }

  @Override public List<String> listSchemaNames(ConnectorSession session) {
    return listSchemaNamesInternal();
  }

  /** List schema names with the connector class loader active on this thread. */
  public List<String> listSchemaNamesInternal() {
    List<String> schemaNameList;
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
      schemaNameList = carbonTableReader.getSchemaNames();
    }
    return schemaNameList;
  }

  @Override
  public List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull) {
    // A null schema means "all schemas".
    List<String> schemaNames;
    if (schemaNameOrNull != null) {
      schemaNames = ImmutableList.of(schemaNameOrNull);
    } else {
      schemaNames = carbonTableReader.getSchemaNames();
    }

    ImmutableList.Builder<SchemaTableName> builder = ImmutableList.builder();
    for (String schemaName : schemaNames) {
      for (String tableName : carbonTableReader.getTableNames(schemaName)) {
        builder.add(new SchemaTableName(schemaName, tableName));
      }
    }
    return builder.build();
  }

  @Override
  public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session,
      SchemaTablePrefix prefix) {
    requireNonNull(prefix, "SchemaTablePrefix is null");

    ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> columns = ImmutableMap.builder();
    for (SchemaTableName tableName : listTables(session, prefix)) {
      ConnectorTableMetadata tableMetadata = getTableMetadata(tableName);
      if (tableMetadata != null) {
        columns.put(tableName, tableMetadata.getColumns());
      }
    }
    return columns.build();
  }

  /**
   * Expand a prefix into concrete table names.
   * No schema: all tables in all schemas.  Schema only: all tables in that schema.
   * Schema and table: exactly that table.
   * (Previously a schema-only prefix fell through and built a SchemaTableName
   * with a null table name, which fails.)
   */
  private List<SchemaTableName> listTables(ConnectorSession session, SchemaTablePrefix prefix) {
    if (prefix.getSchemaName() == null || prefix.getTableName() == null) {
      return listTables(session, prefix.getSchemaName());
    }
    return ImmutableList.of(new SchemaTableName(prefix.getSchemaName(), prefix.getTableName()));
  }

  /**
   * Build table metadata (column names and SPI types) for the named table.
   * @throws SchemaNotFoundException when the schema is unknown
   */
  private ConnectorTableMetadata getTableMetadata(SchemaTableName schemaTableName) {
    if (!listSchemaNamesInternal().contains(schemaTableName.getSchemaName())) {
      throw new SchemaNotFoundException(schemaTableName.getSchemaName());
    }

    CarbonTable carbonTable = carbonTableReader.getTable(schemaTableName);

    List<ColumnMetadata> columnsMetaList = new LinkedList<>();
    List<CarbonColumn> carbonColumns =
        carbonTable.getCreateOrderColumn(schemaTableName.getTableName());
    for (CarbonColumn col : carbonColumns) {
      // show columns command will return these data
      Type columnType = carbonDataType2SpiMapper(col.getColumnSchema());
      ColumnMetadata columnMeta =
          new ColumnMetadata(col.getColumnSchema().getColumnName(), columnType);
      columnsMetaList.add(columnMeta);
    }

    //carbondata connector's table metadata
    return new ConnectorTableMetadata(schemaTableName, columnsMetaList);
  }

  @Override public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session,
      ConnectorTableHandle tableHandle) {
    CarbondataTableHandle handle =
        checkType(tableHandle, CarbondataTableHandle.class, "tableHandle");
    checkArgument(handle.getConnectorId().equals(connectorId),
        "tableHandle is not for this connector");

    String schemaName = handle.getSchemaTableName().getSchemaName();
    if (!listSchemaNamesInternal().contains(schemaName)) {
      throw new SchemaNotFoundException(schemaName);
    }

    //CarbonTable(official struct) is stored in CarbonMetadata(official struct)
    CarbonTable cb = carbonTableReader.getTable(handle.getSchemaTableName());

    ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
    String tableName = handle.getSchemaTableName().getTableName();

    // Dimensions and measures are mapped through the same handle type; the
    // boolean flag distinguishes measures (true) from dimensions (false).
    for (CarbonDimension column : cb.getDimensionByTableName(tableName)) {
      ColumnSchema cs = column.getColumnSchema();
      Type spiType = carbonDataType2SpiMapper(cs);
      columnHandles.put(cs.getColumnName(),
          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType,
              column.getSchemaOrdinal(), column.getKeyOrdinal(), column.getColumnGroupOrdinal(),
              false, cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(),
              cs.getPrecision(), cs.getScale()));
    }

    for (CarbonMeasure measure : cb.getMeasureByTableName(tableName)) {
      ColumnSchema cs = measure.getColumnSchema();
      Type spiType = carbonDataType2SpiMapper(cs);
      columnHandles.put(cs.getColumnName(),
          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType,
              cs.getSchemaOrdinal(), measure.getOrdinal(), cs.getColumnGroupId(), true,
              cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(),
              cs.getPrecision(), cs.getScale()));
    }

    //should i cache it?
    columnHandleMap = columnHandles.build();
    return columnHandleMap;
  }

  @Override public ColumnMetadata getColumnMetadata(ConnectorSession session,
      ConnectorTableHandle tableHandle, ColumnHandle columnHandle) {
    checkType(tableHandle, CarbondataTableHandle.class, "tableHandle");
    return checkType(columnHandle, CarbondataColumnHandle.class, "columnHandle")
        .getColumnMetadata();
  }

  @Override
  public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) {
    //check tablename is valid
    //schema is exist
    //tables is exist
    //CarbondataTable  get from jar
    return new CarbondataTableHandle(connectorId, tableName);
  }

  @Override public List<ConnectorTableLayoutResult> getTableLayouts(ConnectorSession session,
      ConnectorTableHandle table, Constraint<ColumnHandle> constraint,
      Optional<Set<ColumnHandle>> desiredColumns) {
    CarbondataTableHandle handle = checkType(table, CarbondataTableHandle.class, "table");
    ConnectorTableLayout layout = new ConnectorTableLayout(
        new CarbondataTableLayoutHandle(handle,
            constraint.getSummary()/*, constraint.getPredicateMap(),constraint.getFilterTuples()*/));
    return ImmutableList.of(new ConnectorTableLayoutResult(layout, constraint.getSummary()));
  }

  @Override public ConnectorTableLayout getTableLayout(ConnectorSession session,
      ConnectorTableLayoutHandle handle) {
    return new ConnectorTableLayout(handle);
  }

  @Override public ConnectorTableMetadata getTableMetadata(ConnectorSession session,
      ConnectorTableHandle table) {
    return getTableMetadataInternal(table);
  }

  /** Table-metadata lookup that also validates the handle belongs to this connector. */
  public ConnectorTableMetadata getTableMetadataInternal(ConnectorTableHandle table) {
    CarbondataTableHandle carbondataTableHandle =
        checkType(table, CarbondataTableHandle.class, "table");
    checkArgument(carbondataTableHandle.getConnectorId().equals(connectorId),
        "tableHandle is not for this connector");
    return getTableMetadata(carbondataTableHandle.getSchemaTableName());
  }

  /**
   * Map a CarbonData column type to the corresponding Presto SPI type.
   * Unknown types fall back to VARCHAR.
   */
  public static Type carbonDataType2SpiMapper(ColumnSchema columnSchema) {
    DataType colType = columnSchema.getDataType();
    if (colType == DataTypes.BOOLEAN) {
      return BooleanType.BOOLEAN;
    } else if (colType == DataTypes.SHORT) {
      return SmallintType.SMALLINT;
    } else if (colType == DataTypes.INT) {
      return IntegerType.INTEGER;
    } else if (colType == DataTypes.LONG) {
      return BigintType.BIGINT;
    } else if (colType == DataTypes.FLOAT || colType == DataTypes.DOUBLE) {
      return DoubleType.DOUBLE;
    } else if (DataTypes.isDecimal(colType)) {
      // Precision 0 means "unspecified": use the SPI default decimal.
      if (columnSchema.getPrecision() > 0) {
        return DecimalType.createDecimalType(columnSchema.getPrecision(), columnSchema.getScale());
      } else {
        return DecimalType.createDecimalType();
      }
    } else if (colType == DataTypes.STRING) {
      return VarcharType.VARCHAR;
    } else if (colType == DataTypes.DATE) {
      return DateType.DATE;
    } else if (colType == DataTypes.TIMESTAMP) {
      return TimestampType.TIMESTAMP;
    } else {
      return VarcharType.VARCHAR;
    }
  }
}
/*
 * Conditions Of Use
 *
 * This software was developed by employees of the National Institute of
 * Standards and Technology (NIST), an agency of the Federal Government.
 * Pursuant to title 15 Untied States Code Section 105, works of NIST
 * employees are not subject to copyright protection in the United States
 * and are considered to be in the public domain.  As a result, a formal
 * license is not needed to use the software.
 *
 * This software is provided by NIST as a service and is expressly
 * provided "AS IS."  NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
 * AND DATA ACCURACY.  NIST does not warrant or make any representations
 * regarding the use of the software or the results thereof, including but
 * not limited to the correctness, accuracy, reliability or usefulness of
 * the software.
 *
 * Permission to use this software is contingent upon your acceptance
 * of the terms of this agreement
 *
 * .
 *
 */
package gov.nist.javax.sip.parser;

import gov.nist.javax.sip.header.*;
import gov.nist.core.*;
import java.text.ParseException;

/**
 * Parser for via headers.
 *
 * @version 1.2 $Revision: 1.13 $ $Date: 2010-05-06 14:07:45 $
 * @since 1.1
 *
 * @author Olivier Deruelle
 * @author M. Ranganathan
 */
public class ViaParser extends HeaderParser {

    /** Construct a parser over raw header text. */
    public ViaParser(String via) {
        super(via);
    }

    /** Construct a parser that shares an existing lexer. */
    public ViaParser(Lexer lexer) {
        super(lexer);
    }

    /**
     * a parser for the essential part of the via header.
     *
     * Consumes "SIP/2.0/TRANSPORT host[:port](;param)*[(comment)]" from the
     * lexer and populates the passed Via object.  Note the strict interleaving
     * of match/SPorHT/getNextToken calls: SPorHT skips optional whitespace
     * between every syntactic element, so RFC 3261's loose whitespace rules
     * (e.g. "SIP / 2.0 / UDP") are tolerated.
     */
    private void parseVia(Via v) throws ParseException {
        // The protocol
        lexer.match(TokenTypes.ID);
        Token protocolName = lexer.getNextToken();

        this.lexer.SPorHT();
        // consume the "/"
        lexer.match('/');
        this.lexer.SPorHT();
        lexer.match(TokenTypes.ID);
        this.lexer.SPorHT();
        Token protocolVersion = lexer.getNextToken();

        this.lexer.SPorHT();

        // We consume the "/"
        lexer.match('/');
        this.lexer.SPorHT();
        lexer.match(TokenTypes.ID);
        this.lexer.SPorHT();

        Token transport = lexer.getNextToken();
        this.lexer.SPorHT();

        Protocol protocol = new Protocol();
        protocol.setProtocolName(protocolName.getTokenValue());
        protocol.setProtocolVersion(protocolVersion.getTokenValue());
        protocol.setTransport(transport.getTokenValue());
        v.setSentProtocol(protocol);

        // sent-By
        HostNameParser hnp = new HostNameParser(this.getLexer());
        HostPort hostPort = hnp.hostPort( true );
        v.setSentBy(hostPort);

        // Ignore blanks
        this.lexer.SPorHT();

        // parameters: each is ";name[=value]".  A branch parameter, when
        // present, must carry a value (RFC 3261 requires a branch id).
        while (lexer.lookAhead(0) == ';') {
            this.lexer.consume(1);
            this.lexer.SPorHT();
            NameValue nameValue = this.nameValue();
            String name = nameValue.getName();
            if (name.equals(Via.BRANCH)) {
                String branchId = (String) nameValue.getValueAsObject();
                if (branchId == null)
                    throw new ParseException("null branch Id", lexer.getPtr());

            }
            v.setParameter(nameValue);
            this.lexer.SPorHT();
        }

        //
        // JvB Note: RFC3261 does not allow a comment in Via headers anymore
        //
        // Parsed anyway for backward compatibility; the charLexer treats the
        // input character-by-character so arbitrary comment text is accepted.
        if (lexer.lookAhead(0) == '(') {
            this.lexer.selectLexer("charLexer");
            lexer.consume(1);
            StringBuilder comment = new StringBuilder();
            while (true) {
                char ch = lexer.lookAhead(0);
                if (ch == ')') {
                    lexer.consume(1);
                    break;
                } else if (ch == '\\') {
                    // Escaped character: keep the backslash token and the
                    // character it escapes.
                    Token tok = lexer.getNextToken();
                    comment.append(tok.getTokenValue());
                    lexer.consume(1);
                    tok = lexer.getNextToken();
                    comment.append(tok.getTokenValue());
                    lexer.consume(1);
                } else if (ch == '\n') {
                    // Unterminated comment: stop at end of line.
                    break;
                } else {
                    comment.append(ch);
                    lexer.consume(1);
                }
            }
            v.setComment(comment.toString());
        }

    }

    /**
     * Overrides the superclass nameValue parser because we have to tolerate
     * IPV6 addresses in the received parameter.
     */
    protected NameValue nameValue() throws ParseException {
        if (debug)
            dbg_enter("nameValue");
        try {

            lexer.match(LexerCore.ID);
            Token name = lexer.getNextToken();
            // eat white space.
            lexer.SPorHT();
            try {

                boolean quoted = false;

                char la = lexer.lookAhead(0);

                if (la == '=') {
                    lexer.consume(1);
                    lexer.SPorHT();
                    String str = null;
                    if (name.getTokenValue().compareToIgnoreCase(Via.RECEIVED) == 0) {
                        // Allow for IPV6 Addresses.
                        // these could have : in them!
                        str = lexer.byteStringNoSemicolon();
                    } else {
                        if (lexer.lookAhead(0) == '\"') {
                            str = lexer.quotedString();
                            quoted = true;
                        } else {
                            lexer.match(LexerCore.ID);
                            Token value = lexer.getNextToken();
                            str = value.getTokenValue();
                        }
                    }
                    NameValue nv = new NameValue(name.getTokenValue()
                            .toLowerCase(), str);
                    if (quoted)
                        nv.setQuotedValue();
                    return nv;
                } else {
                    // Flag parameter: name with no '=value'.
                    return new NameValue(name.getTokenValue().toLowerCase(), null);
                }
            } catch (ParseException ex) {
                // Tolerant fallback: a malformed value yields a value-less
                // parameter instead of aborting the whole header parse.
                return new NameValue(name.getTokenValue(), null);
            }

        } finally {
            if (debug)
                dbg_leave("nameValue");
        }
    }

    /**
     * Parse one or more comma-separated Via headers into a ViaList.
     *
     * @return the parsed ViaList (as a SIPHeader)
     * @throws ParseException on malformed input
     */
    public SIPHeader parse() throws ParseException {
        if (debug)
            dbg_enter("parse");
        try {
            ViaList viaList = new ViaList();
            // The first via header.
            this.lexer.match(TokenTypes.VIA);
            this.lexer.SPorHT(); // ignore blanks
            this.lexer.match(':'); // expect a colon.
            this.lexer.SPorHT(); // ignore blanks.

            while (true) {
                Via v = new Via();
                parseVia(v);
                viaList.add(v);
                this.lexer.SPorHT(); // eat whitespace.
                if (this.lexer.lookAhead(0) == ',') {
                    this.lexer.consume(1); // Consume the comma
                    this.lexer.SPorHT(); // Ignore space after.
                }
                if (this.lexer.lookAhead(0) == '\n')
                    break;
            }
            this.lexer.match('\n');
            return viaList;
        } finally {
            if (debug)
                dbg_leave("parse");
        }

    }

    /**
     *
     * public static void main(String args[]) throws ParseException { String
     * via[] = { "Via: SIP/2.0/UDP 135.180.130.133;branch=-12345\n", "Via:
     * SIP/2.0/UDP 166.34.120.100;branch=0000045d-00000001"+ ",SIP/2.0/UDP
     * 166.35.224.216:5000\n", "Via: SIP/2.0/UDP sip33.example.com,"+ "
     * SIP/2.0/UDP sip32.example.com (oli),"+ "SIP/2.0/UDP sip31.example.com\n",
     * "Via: SIP/2.0/UDP host.example.com;received=::133;"+ "
     * branch=C1C3344E2710000000E299E568E7potato10potato0potato0\n", "Via:
     * SIP/2.0/UDP host.example.com;received=135.180.130.133;"+ "
     * branch=C1C3344E2710000000E299E568E7potato10potato0potato0\n", "Via:
     * SIP/2.0/UDP company.com:5604 ( Hello )"+ ", SIP / 2.0 / UDP
     * 135.180.130.133\n", "Via: SIP/2.0/UDP
     * 129.6.55.9:7060;received=stinkbug.antd.nist.gov\n",
     *
     * "Via: SIP/2.0/UDP ss2.wcom.com:5060;branch=721e418c4.1"+ ", SIP/2.0/UDP
     * ss1.wcom.com:5060;branch=2d4790.1"+ " , SIP/2.0/UDP here.com:5060( Hello
     * the big world) \n" ,"Via: SIP/2.0/UDP
     * ss1.wcom.com:5060;branch=2d4790.1\n", "Via: SIP/2.0/UDP
     * first.example.com:4000;ttl=16"+ ";maddr=224.2.0.1 ;branch=a7c6a8dlze.1
     * (Acme server)\n" };
     *
     * for (int i = 0; i < via.length; i++ ) { ViaParser vp = new
     * ViaParser(via[i]); System.out.println("toParse = " + via[i]); ViaList vl =
     * (ViaList) vp.parse(); System.out.println("encoded = " + vl.encode()); }
     * }
     *
     */
}
/**
 * Licensed to Odiago, Inc. under one or more contributor license
 * agreements.  See the NOTICE.txt file distributed with this work for
 * additional information regarding copyright ownership.  Odiago, Inc.
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.odiago.flumebase.lang;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.odiago.flumebase.exec.AliasSymbol;
import com.odiago.flumebase.exec.AssignedSymbol;
import com.odiago.flumebase.exec.HashSymbolTable;
import com.odiago.flumebase.exec.Symbol;
import com.odiago.flumebase.exec.SymbolTable;
import com.odiago.flumebase.exec.WindowSymbol;

import com.odiago.flumebase.parser.AliasedExpr;
import com.odiago.flumebase.parser.AllFieldsExpr;
import com.odiago.flumebase.parser.BinExpr;
import com.odiago.flumebase.parser.ConstExpr;
import com.odiago.flumebase.parser.CreateStreamStmt;
import com.odiago.flumebase.parser.DescribeStmt;
import com.odiago.flumebase.parser.DropStmt;
import com.odiago.flumebase.parser.EntityTarget;
import com.odiago.flumebase.parser.ExplainStmt;
import com.odiago.flumebase.parser.Expr;
import com.odiago.flumebase.parser.FnCallExpr;
import com.odiago.flumebase.parser.FormatSpec;
import com.odiago.flumebase.parser.GroupBy;
import com.odiago.flumebase.parser.IdentifierExpr;
import com.odiago.flumebase.parser.JoinedSource;
import com.odiago.flumebase.parser.LiteralSource;
import com.odiago.flumebase.parser.RangeSpec;
import com.odiago.flumebase.parser.RecordSource;
import com.odiago.flumebase.parser.SQLStatement;
import com.odiago.flumebase.parser.SelectStmt;
import com.odiago.flumebase.parser.ShowStmt;
import com.odiago.flumebase.parser.TypedField;
import com.odiago.flumebase.parser.UnaryExpr;
import com.odiago.flumebase.parser.WindowDef;
import com.odiago.flumebase.parser.WindowSpec;

import com.odiago.flumebase.util.Ref;
import com.odiago.flumebase.util.Stack;
import com.odiago.flumebase.util.StringUtils;

/**
 * Run a type-checker over all elements of the AST.
 *
 * <p>Visits every node of a parsed statement, resolving identifiers against a
 * stack of {@link SymbolTable}s (one pushed per enclosing source/scope) and
 * throwing {@link TypeCheckException} on any type error. Visiting a source
 * (stream, SELECT, JOIN) pushes its field symbols on the stack; enclosing
 * statements pop back down to their entry height when done.</p>
 */
public class TypeChecker extends Visitor {
  private static final Logger LOG = LoggerFactory.getLogger(
      TypeChecker.class.getName());

  /** Stack containing the symbol table for the current visit context. */
  private final Stack<SymbolTable> mSymTableContext;

  /**
   * Indicates how many nested SELECT statements deep we are into the
   * complete statement. The top-most SELECT is #1.
   */
  private int mSelectNestingDepth;

  /** Holds the id number to assign to the next field of a literal source. */
  private final Ref<Integer> mNextFieldId;

  public TypeChecker(SymbolTable rootSymbolTable) {
    mSymTableContext = new Stack<SymbolTable>();
    mSymTableContext.push(rootSymbolTable);
    mSelectNestingDepth = 0;
    mNextFieldId = new Ref<Integer>(Integer.valueOf(0));
  }

  @Override
  protected void visit(CreateStreamStmt s) throws VisitException {
    // The only checkable child of a CREATE STREAM is its format clause.
    s.getFormatSpec().accept(this);
  }

  @Override
  protected void visit(FormatSpec s) throws VisitException {
    // TODO: Typecheck the FormatSpec; make sure the format describes
    // a real format that exists in the symbol table (each format should
    // have one; it should be like a builtin function).
  }

  @Override
  protected void visit(DropStmt s) throws VisitException {
    // Check that the DROP ____ type matches the type of the object to be dropped.
    SymbolTable symtab = mSymTableContext.top();
    String name = s.getName();
    Symbol sym = symtab.resolve(name);
    if (null == sym) {
      throw new TypeCheckException("No such object at top level: " + name);
    }

    EntityTarget targetType = s.getType();
    Type.TypeName symType = sym.getType().getTypeName();

    // Check that the DROP ___ type matches the symbol type.
    if (EntityTarget.Stream.equals(targetType)
        && !Type.TypeName.STREAM.equals(symType)) {
      throw new TypeCheckException("Entity " + name + " has incorrect type: " + symType);
    } else if (EntityTarget.Flow.equals(targetType)
        && !Type.TypeName.FLOW.equals(symType)) {
      throw new TypeCheckException("Entity " + name + " has incorrect type: " + symType);
    }
  }

  @Override
  protected void visit(ShowStmt s) throws VisitException {
    // Nothing to do.
  }

  @Override
  protected void visit(LiteralSource s) throws VisitException {
    SymbolTable symtab = mSymTableContext.top();

    String name = s.getName();
    LOG.debug("Visiting literalsrc " + name);
    Symbol symbol = symtab.resolve(name);
    if (null == symbol) {
      throw new TypeCheckException("No such identifier: " + name);
    } else if (symbol.getType().getTypeName() != Type.TypeName.STREAM) {
      // FIX: original message opened "(type=" without ever closing the paren.
      throw new TypeCheckException("Identifier " + name + " is not a stream (type="
          + symbol.getType() + ")");
    }

    // Add a new symbol table layer containing the named stream's symbols.
    SymbolTable sourceTable = s.getFieldsSymbolTable(symtab, mNextFieldId);
    // Push it on top of the stack.
    mSymTableContext.push(sourceTable);
  }

  /**
   * Check if 's' is a valid SQLStatement subclass to be a source for a SELECT
   * statement. If so, visit it. Otherwise, throw an exception.
   */
  private void visitValidSource(SQLStatement source) throws VisitException {
    if (source instanceof RecordSource) {
      // Note that this will push a new symbol table on the stack.
      source.accept(this);
    } else {
      throw new TypeCheckException("Invalid source in FROM clause; this must be "
          + "an identifier, or a SELECT statement. Got " + source.getClass().getName());
    }
  }

  /**
   * Create symbols for the specified field and install them in the symbol
   * table.
   *
   * @param symtab the symbol table where the symbols go
   * @param streamName the name of the stream/select stmt that's generating the
   * field.
   * @param fieldName the field's own name.
   * @param assignedName the unique label used to transmit values via avro.
   * @param type the type of the field.
   *
   * <p> As an output symbol, it is available as 'streamName.fieldName' if
   * streamName is not null, and also as 'fieldName'. The latter is an alias to
   * the former, if streamName is not null. </p>
   */
  private void createSymbols(SymbolTable symtab, String streamName, String fieldName,
      String assignedName, Type type) {
    if (null != streamName) {
      AssignedSymbol sym = new AssignedSymbol(streamName + "." + fieldName, type,
          assignedName, IdentifierExpr.AccessType.FIELD);
      sym.setParentName(streamName);
      symtab.addSymbol(sym);
      symtab.addSymbol(new AliasSymbol(fieldName, sym));
    } else {
      symtab.addSymbol(new AssignedSymbol(fieldName, type, assignedName,
          IdentifierExpr.AccessType.FIELD));
    }
  }

  @Override
  protected void visit(SelectStmt s) throws VisitException {
    SymbolTable outTable = null;
    mSelectNestingDepth++;

    // Visiting a source pushes one or more symbol tables on the stack,
    // declaring the fields of this source. While visiting our clauses, we
    // also push a symbol table declaring the names of any windows defined
    // with WINDOW .. AS clauses. Make sure we pop them on our way out by
    // resetting the stack to its current height.
    int symbolStackHeight = mSymTableContext.size();
    SymbolTable originalSymtab = mSymTableContext.top();
    try {
      // Visit the window clauses first; if their types are okay, create a new
      // symbol table to use when visiting our sources, that contains the
      // window names.
      SymbolTable symbolsForSources = new HashSymbolTable(originalSymtab);
      List<WindowDef> windowDefs = s.getWindowDefs();
      for (WindowDef def : windowDefs) {
        def.accept(this);
        symbolsForSources.addSymbol(new WindowSymbol(def.getName(), def.getWindowSpec()));
      }
      mSymTableContext.push(symbolsForSources);

      // Now visit the sources, with the symbols for any windows pushed.
      SQLStatement source = s.getSource();
      visitValidSource(source);

      SymbolTable exprTable = mSymTableContext.top();
      outTable = new HashSymbolTable(originalSymtab);

      // The "stream name" representing this SELECT stmt in the parent
      // statement.
      String stmtAlias = s.getAlias();

      // Nested SELECT statements require an alias.
      if (mSelectNestingDepth > 1 && null == stmtAlias) {
        throw new TypeCheckException("Each derived stream must have its own alias.");
      }

      // Type check all the selected expressions using the symbols from our source.
      for (AliasedExpr aliasedExpr : s.getSelectExprs()) {
        aliasedExpr.accept(this);

        // Add our output symbols to the output symbol table.
        if (aliasedExpr.getExpr() instanceof AllFieldsExpr) {
          // Add all symbols in the source's table into this one,
          // to add fields pulled in by the "*" operator.
          // Resolve away all aliased symbols to their final version, and rename
          // any "qualifier.field" -> "field".
          Iterator<Symbol> sourceSymbols = exprTable.levelIterator();
          while (sourceSymbols.hasNext()) {
            Symbol srcSym = sourceSymbols.next().resolveAliases();
            String symName = StringUtils.dequalify(srcSym.getName());
            if (null != stmtAlias) {
              // Re-qualify each field under this statement's own alias.
              Symbol sym = srcSym.withName(stmtAlias + "." + symName);
              outTable.addSymbol(sym);
              outTable.addSymbol(new AliasSymbol(symName, sym));
            } else {
              outTable.addSymbol(srcSym.withName(symName));
            }
          }
        } else if (aliasedExpr.getExpr() instanceof IdentifierExpr) {
          // AliasedExpr entries which are just IdentifierExprs were
          // not handled in AssignFieldLabelsVisitor. Now that the
          // IdentifierExpr has been visited, resolve its avro name here.
          IdentifierExpr ident = (IdentifierExpr) aliasedExpr.getExpr();
          String name = aliasedExpr.getUserAlias();
          String assignedName = ident.getAssignedName();
          if (!ident.getIdentifier().startsWith("#")) {
            // Use the avro label of the identified field.
            aliasedExpr.setAvroLabel(assignedName);
          }
          Type type = ident.getType(exprTable);

          // Create symbols for the output SymbolTable.
          createSymbols(outTable, stmtAlias, name, assignedName, type);
        } else {
          // NOTE: This relies on aliasedExpr.getUserAlias() being filled;
          // this is done in the AssignFieldLabelsVisitor, which is run first.
          String name = aliasedExpr.getUserAlias();
          // This is guaranteed to not contain a '.'.
          assert !name.contains(".");
          Type type = aliasedExpr.getExpr().getType(exprTable);
          createSymbols(outTable, stmtAlias, name, aliasedExpr.getAvroLabel(), type);
        }
      }

      // Check the where clause for validity if it's non-null.
      Expr where = s.getWhereConditions();
      if (null != where) {
        where.accept(this);

        // The where clause must evaluate to a boolean value.
        Type whereType = where.getType(exprTable);
        if (!whereType.promotesTo(Type.getNullable(Type.TypeName.BOOLEAN))) {
          throw new TypeCheckException("Expected where clause with boolean type, not "
              + whereType);
        }
      }

      // Check the GROUP BY clause for validity if it's non-null.
      GroupBy groupBy = s.getGroupBy();
      if (null != groupBy) {
        groupBy.accept(this);
      }

      // Check the OVER clause for validity if it's non-null.
      // This must evaluate to a value of type WINDOW.
      Expr windowOver = s.getWindowOver();
      if (null != windowOver) {
        windowOver.accept(this);
        Type winType = windowOver.getType(exprTable);
        if (!winType.equals(Type.getPrimitive(Type.TypeName.WINDOW))) {
          throw new TypeCheckException("SELECT ... OVER clause requires a window, not an "
              + "identifier of type " + winType);
        }
      }
    } finally {
      // Pop the source symbol tables from the stack.
      mSymTableContext.reset(symbolStackHeight);
      mSelectNestingDepth--;
    }

    // Push our output symbols on the stack so any higher-level select stmt can
    // type check against them. Memorize the symbols for this statement in the
    // statement object itself.
    if (null != outTable) {
      mSymTableContext.push(outTable);
      s.setFieldSymbols(outTable.cloneLevel());
    }

    // HAVING clause uses the output symbol names. It can only operate on
    // fields already explicitly selected by the user -- if these are not
    // already present, this will fail.
    Expr having = s.getHaving();
    if (null != having) {
      having.accept(this);

      // The having clause must evaluate to a boolean value.
      Type havingType = having.getType(outTable);
      if (!havingType.promotesTo(Type.getNullable(Type.TypeName.BOOLEAN))) {
        throw new TypeCheckException("Expected having clause with boolean type, not "
            + havingType);
      }
    }
  }

  @Override
  protected void visit(JoinedSource s) throws VisitException {
    SQLStatement leftSrc = s.getLeft();
    SQLStatement rightSrc = s.getRight();

    LOG.debug("Visiting joinedsrc");
    int symtabHeight = mSymTableContext.size();

    visitValidSource(leftSrc);
    visitValidSource(rightSrc);

    // Verify: exactly one symbol table pushed per source.
    assert mSymTableContext.size() == symtabHeight + 2;

    // Each of these sources has pushed one symbol table on the stack.
    // Merge them together, removing duplicate symbols. These must be
    // referred to by "qualified.alias" only.
    SymbolTable rightSymTab = mSymTableContext.pop();
    SymbolTable leftSymTab = mSymTableContext.pop();
    SymbolTable symTab = SymbolTable.mergeSymbols(leftSymTab, rightSymTab,
        mSymTableContext.top());
    s.setJoinedSymbols(symTab);
    mSymTableContext.push(symTab);

    // Verify that the join expression is BOOLEAN.
    Expr joinExpr = s.getJoinExpr();
    joinExpr.accept(this);
    Type joinType = joinExpr.getType(symTab);
    if (!joinType.promotesTo(Type.getNullable(Type.TypeName.BOOLEAN))) {
      throw new TypeCheckException("JOIN ... ON clause requires boolean test expression, not "
          + joinExpr.toStringOneLine());
    }

    // Make sure the "OVER" clause joins over a Window.
    Expr windowExpr = s.getWindowExpr();
    windowExpr.accept(this);
    Type winType = windowExpr.getType(symTab);
    if (!winType.equals(Type.getPrimitive(Type.TypeName.WINDOW))) {
      throw new TypeCheckException("JOIN ... OVER clause requires a window, not an "
          + "identifier of type " + winType);
    }
  }

  @Override
  protected void visit(DescribeStmt s) throws VisitException {
    // Check the symbol table that the identifier exists.
    String id = s.getIdentifier();
    SymbolTable symtab = mSymTableContext.top();
    Symbol symbol = symtab.resolve(id);
    if (null == symbol) {
      throw new TypeCheckException("No such identifier: " + id);
    }
  }

  @Override
  protected void visit(ExplainStmt s) throws VisitException {
    s.getChildStmt().accept(this);
  }

  @Override
  protected void visit(AliasedExpr e) throws VisitException {
    Expr subExpr = e.getExpr();
    String userAlias = e.getUserAlias();

    // If the sub-expression is just a '*', this can't have an alias.
    // ("SELECT * AS bla FROM ..." is illegal.)
    if (subExpr instanceof AllFieldsExpr && userAlias != null) {
      throw new TypeCheckException("Cannot assign field label to '*' operator.");
    }

    if (userAlias != null && userAlias.contains(".")) {
      // Can't "SELECT x AS y.x", it confuses our name promotion.
      throw new TypeCheckException("Cannot use the '.' character in a field alias ("
          + userAlias + ")");
    }

    // Typecheck the sub-expression.
    subExpr.accept(this);
  }

  @Override
  protected void visit(ConstExpr e) throws VisitException {
    // Nothing to do.
  }

  @Override
  protected void visit(BinExpr e) throws VisitException {
    // Type-check sub-expressions.
    e.getLeftExpr().accept(this);
    e.getRightExpr().accept(this);

    SymbolTable symTab = mSymTableContext.top();

    // Get the type from the expression; this handles promotion of
    // lhs to rhs or vice versa.
    Type expType = e.getType(symTab);
    if (null == expType) {
      // Sub-expressions cannot agree on a common type.
      throw new TypeCheckException("Cannot assign type to binary expression: "
          + e.toStringOneLine());
    }
    e.setType(expType); // Cache this value for later.

    Type lhsType = e.getLeftExpr().getType(symTab);
    Type rhsType = e.getRightExpr().getType(symTab);

    // Given the operator in the binary expression, check that the type makes sense.
    switch (e.getOp()) {
    case Times:
    case Div:
    case Mod:
    case Subtract:
      // For numeric operators, input types must be numeric.
      if (!lhsType.isNumeric()) {
        throw new TypeCheckException("Operator " + e.getOp()
            + " requires numeric lhs argument");
      } else if (!rhsType.isNumeric()) {
        throw new TypeCheckException("Operator " + e.getOp()
            + " requires numeric rhs argument");
      }
      break;
    case Add:
      // This requires input arguments that are numeric, or strings.
      // Check that the output type (which is the resolved, promoted type) is numeric or string.
      if (!expType.isNumeric()
          && !expType.equals(Type.getPrimitive(Type.TypeName.STRING))
          && !expType.equals(Type.getNullable(Type.TypeName.STRING))) {
        throw new TypeCheckException("Operator " + e.getOp()
            + " requires numeric or string arguments.");
      }
      break;
    case Greater:
    case GreaterEq:
    case Less:
    case LessEq:
      // These require comparable arguments.
      if (!lhsType.isComparable()) {
        throw new TypeCheckException("Operator " + e.getOp()
            + " requires comparable lhs argument.");
      } else if (!rhsType.isComparable()) {
        throw new TypeCheckException("Operator " + e.getOp()
            + " requires comparable rhs argument.");
      }
      break;
    case Eq:
    case NotEq:
      // These require primitive arguments.
      if (!lhsType.isPrimitive()) {
        throw new TypeCheckException("Cannot test for equality on non-primitive lhs");
      } else if (!rhsType.isPrimitive()) {
        throw new TypeCheckException("Cannot test for equality on non-primitive rhs");
      }
      break;
    case And:
    case Or:
      // Both arguments must be boolean.
      if (!lhsType.equals(Type.getPrimitive(Type.TypeName.BOOLEAN))) {
        throw new TypeCheckException("Operator " + e.getOp() + " requires boolean lhs.");
      } else if (!rhsType.equals(Type.getPrimitive(Type.TypeName.BOOLEAN))) {
        throw new TypeCheckException("Operator " + e.getOp() + " requires boolean rhs.");
      }
      break;
    default:
      // FIX: this default covers every binary operator, not only boolean ones.
      throw new TypeCheckException("Do not know how to type-check binary operator: "
          + e.getOp());
    }
  }

  @Override
  protected void visit(FnCallExpr e) throws VisitException {
    // Type-check all the argument expressions.
    for (Expr argExpr : e.getArgExpressions()) {
      argExpr.accept(this);
    }

    // Verify that all the actual expression types can be promoted to the argument types.
    e.resolveArgTypes(mSymTableContext.top());
  }

  /**
   * @return true if symName does not exist in symTab, but "foo.symName" does.
   */
  private boolean isAmbiguousAlias(String symName, SymbolTable symTab) {
    if (symTab.resolve(symName) != null) {
      return false; // The symbol exists; we're fine.
    }

    String dotName = "." + symName;
    Iterator<Symbol> symbols = symTab.levelIterator();
    while (symbols.hasNext()) {
      Symbol sym = symbols.next();
      if (sym.getName().endsWith(dotName)) {
        // We've found 'foo.symName', but no symName. Ambiguous identifier.
        return true;
      }
    }

    return false;
  }

  /**
   * Look up a field name identifier and resolve it to a Symbol and a Type.
   * These values are returned via Ref arguments.
   * @param fieldsSymTab the symbol table defining fields of the stream.
   * @param fieldName The field name to resolve.
   * @param outSym (output) The final resolved output symbol for the entity.
   * @param outType (output) The type for values of this field.
   */
  private void resolveIdentifier(SymbolTable fieldsSymTab, String fieldName,
      Ref<AssignedSymbol> outSym, Ref<Type> outType) throws TypeCheckException {
    // Check that this field is defined by one of the input sources.
    // Since the source pushed a symbol table on the stack, just check
    // that we have a symbol table, and that this is a primitive value.
    Symbol fieldSym = fieldsSymTab.resolve(fieldName);
    if (null == fieldSym) {
      // This isn't just a simple field. Check if it's an attribute.
      if (fieldName.startsWith("#") && fieldName.length() > 1) {
        // This is an attribute name.
        String attrName = fieldName.substring(1);
        fieldSym = new AssignedSymbol(attrName, Type.getNullable(Type.TypeName.BINARY),
            attrName, IdentifierExpr.AccessType.ATTRIBUTE);
      } else if (isAmbiguousAlias(fieldName, fieldsSymTab)) {
        // The identifier doesn't exist, or else it's an ambiguous alias.
        // Return the appropriate error message.
        // This identifier is probably an alias for another identifier
        // but the alias is removed due to ambiguity in a JOIN.
        throw new TypeCheckException("Ambiguous identifier: \"" + fieldName + "\". "
            + "You must prefix this with a stream name qualifier.");
      } else {
        // This identifier straight-up doesn't exist.
        throw new TypeCheckException("No such identifier: \"" + fieldName + "\"");
      }
    }

    Type fieldType = fieldSym.getType();
    if (!fieldType.isConcrete()) {
      // This name refers to a stream or other ephemeral type. We can't
      // select that.
      throw new TypeCheckException("Cannot select non-concrete entity \""
          + fieldName + "\"");
    }

    outType.item = fieldType;

    // The field symbol should also be an AssignedSymbol that has a unique
    // reference name throughout the query. Bind to the reference name here;
    // the actual query uses this name instead of the user-friendly
    // identifier.
    fieldSym = fieldSym.resolveAliases();
    assert fieldSym instanceof AssignedSymbol;
    outSym.item = (AssignedSymbol) fieldSym;
  }

  @Override
  protected void visit(IdentifierExpr e) throws VisitException {
    SymbolTable fieldsSymTab = mSymTableContext.top();
    String fieldName = e.getIdentifier();
    Ref<AssignedSymbol> symRef = new Ref<AssignedSymbol>();
    Ref<Type> typeRef = new Ref<Type>();

    resolveIdentifier(fieldsSymTab, fieldName, symRef, typeRef);

    Type fieldType = typeRef.item;
    AssignedSymbol fieldSym = symRef.item;

    // Let the AST node memoize its typing information from the symbol table;
    // it will need to reference this at run time to look up values from the
    // EventWrapper.
    e.setType(fieldType);
    e.setAssignedName(fieldSym.getAssignedName());
    e.setAccessType(fieldSym.getAccessType());
    e.setAssignedSymbol(fieldSym);
  }

  @Override
  protected void visit(UnaryExpr e) throws VisitException {
    // Check that this has a type, that its sub-expression has a type,
    // and that the subexpr type is appropriate for the operator.

    // Start by type-checking the subexpression.
    e.getSubExpr().accept(this);

    SymbolTable symTab = mSymTableContext.top();

    Type expType = e.getType(symTab);
    if (null == expType) {
      throw new TypeCheckException("Cannot resolve type for expression: "
          + e.toStringOneLine());
    }

    Type subType = e.getSubExpr().getType(symTab);
    switch (e.getOp()) {
    case Plus:
    case Minus:
      if (!subType.isNumeric()) {
        throw new TypeCheckException("Unary " + e.getOp()
            + " operator requires numeric argument");
      }
      break;
    case Not:
      if (!subType.promotesTo(Type.getNullable(Type.TypeName.BOOLEAN))) {
        throw new TypeCheckException("Unary " + e.getOp()
            + " operator requires boolean argument");
      }
      break;
    case IsNull:
    case IsNotNull:
      // Any primitive type works here.
      if (!subType.isPrimitive()) {
        throw new TypeCheckException("Unary " + e.getOp()
            + " operator expects primitive argument");
      }
      break;
    default:
      throw new TypeCheckException("Cannot type-check unary operator " + e.getOp());
    }
  }

  @Override
  protected void visit(AllFieldsExpr e) throws VisitException {
    // Nothing to do.
  }

  @Override
  protected void visit(WindowDef def) throws VisitException {
    WindowSpec spec = def.getWindowSpec();
    spec.accept(this);
  }

  @Override
  protected void visit(WindowSpec spec) throws VisitException {
    RangeSpec range = spec.getRangeSpec();
    range.accept(this);
  }

  @Override
  protected void visit(RangeSpec spec) throws VisitException {
    // Expressions within a range specification for a window must be constant,
    // and numeric.
    Expr after = spec.getAfterSize();
    Expr prev = spec.getPrevSize();

    after.accept(this);
    prev.accept(this);

    SymbolTable symTab = mSymTableContext.top();
    Type prevType = prev.getType(symTab);
    Type afterType = after.getType(symTab);

    if (null == prevType) {
      throw new TypeCheckException("Cannot resolve type for expression: "
          + prev.toStringOneLine());
    } else if (null == afterType) {
      throw new TypeCheckException("Cannot resolve type for expression: "
          + after.toStringOneLine());
    } else if (!prevType.isNumeric()) {
      throw new TypeCheckException("Expression " + prev.toStringOneLine()
          + " should have numeric type.");
    } else if (!afterType.isNumeric()) {
      throw new TypeCheckException("Expression " + after.toStringOneLine()
          + " should have numeric type.");
    } else if (!prev.isConstant()) {
      throw new TypeCheckException("Expression " + prev.toStringOneLine()
          + " is not constant");
    } else if (!after.isConstant()) {
      throw new TypeCheckException("Expression " + after.toStringOneLine()
          + " is not constant");
    }
  }

  @Override
  protected void visit(GroupBy g) throws VisitException {
    SymbolTable fieldsSymTab = mSymTableContext.top();

    // Check that the fields are real fields; resolve them to TypedField instances,
    // like we do for an IdentifierExpr.
    List<TypedField> typedFields = new ArrayList<TypedField>();
    Ref<AssignedSymbol> symRef = new Ref<AssignedSymbol>();
    Ref<Type> typeRef = new Ref<Type>();
    for (String fieldName : g.getFieldNames()) {
      resolveIdentifier(fieldsSymTab, fieldName, symRef, typeRef);
      Type fieldType = typeRef.item;
      AssignedSymbol fieldSym = symRef.item;
      typedFields.add(new TypedField(fieldSym.getAssignedName(), fieldType));
    }

    g.setFieldTypes(typedFields);
  }
}
package com.microsoft.bingads.v12.customermanagement;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for TimeZoneType.
 *
 * <p>JAXB binding for the {@code TimeZoneType} simpleType declared in the
 * Bing Ads Customer Management v12 schema (namespace
 * {@code https://bingads.microsoft.com/Customer/v12/Entities}). The schema
 * restricts {@code xs:string} to a fixed set of named time-zone values; each
 * enum constant below is bound to its schema string via {@link XmlEnumValue},
 * so the full set of legal values is exactly the constant list of this enum.
 *
 * <p>Use {@link #value()} to obtain the schema string for a constant and
 * {@link #fromValue(String)} for the reverse (case-sensitive) lookup.
 */
@XmlType(name = "TimeZoneType", namespace = "https://bingads.microsoft.com/Customer/v12/Entities")
@XmlEnum
public enum TimeZoneType {

    @XmlEnumValue("AbuDhabiMuscat")
    ABU_DHABI_MUSCAT("AbuDhabiMuscat"),
    @XmlEnumValue("Adelaide")
    ADELAIDE("Adelaide"),
    @XmlEnumValue("Alaska")
    ALASKA("Alaska"),
    @XmlEnumValue("AlmatyNovosibirsk")
    ALMATY_NOVOSIBIRSK("AlmatyNovosibirsk"),
    @XmlEnumValue("AmsterdamBerlinBernRomeStockholmVienna")
    AMSTERDAM_BERLIN_BERN_ROME_STOCKHOLM_VIENNA("AmsterdamBerlinBernRomeStockholmVienna"),
    @XmlEnumValue("Arizona")
    ARIZONA("Arizona"),
    @XmlEnumValue("AstanaDhaka")
    ASTANA_DHAKA("AstanaDhaka"),
    @XmlEnumValue("AthensBuckarestIstanbul")
    ATHENS_BUCKAREST_ISTANBUL("AthensBuckarestIstanbul"),
    @XmlEnumValue("AtlanticTimeCanada")
    ATLANTIC_TIME_CANADA("AtlanticTimeCanada"),
    @XmlEnumValue("AucklandWellington")
    AUCKLAND_WELLINGTON("AucklandWellington"),
    @XmlEnumValue("Azores")
    AZORES("Azores"),
    @XmlEnumValue("Baghdad")
    BAGHDAD("Baghdad"),
    @XmlEnumValue("BakuTbilisiYerevan")
    BAKU_TBILISI_YEREVAN("BakuTbilisiYerevan"),
    @XmlEnumValue("BangkokHanoiJakarta")
    BANGKOK_HANOI_JAKARTA("BangkokHanoiJakarta"),
    @XmlEnumValue("BeijingChongqingHongKongUrumqi")
    BEIJING_CHONGQING_HONG_KONG_URUMQI("BeijingChongqingHongKongUrumqi"),
    @XmlEnumValue("BelgradeBratislavaBudapestLjubljanaPrague")
    BELGRADE_BRATISLAVA_BUDAPEST_LJUBLJANA_PRAGUE("BelgradeBratislavaBudapestLjubljanaPrague"),
    @XmlEnumValue("BogotaLimaQuito")
    BOGOTA_LIMA_QUITO("BogotaLimaQuito"),
    @XmlEnumValue("Brasilia")
    BRASILIA("Brasilia"),
    @XmlEnumValue("Brisbane")
    BRISBANE("Brisbane"),
    @XmlEnumValue("BrusselsCopenhagenMadridParis")
    BRUSSELS_COPENHAGEN_MADRID_PARIS("BrusselsCopenhagenMadridParis"),
    @XmlEnumValue("Bucharest")
    BUCHAREST("Bucharest"),
    @XmlEnumValue("BuenosAiresGeorgetown")
    BUENOS_AIRES_GEORGETOWN("BuenosAiresGeorgetown"),
    @XmlEnumValue("Cairo")
    CAIRO("Cairo"),
    @XmlEnumValue("CanberraMelbourneSydney")
    CANBERRA_MELBOURNE_SYDNEY("CanberraMelbourneSydney"),
    @XmlEnumValue("CapeVerdeIsland")
    CAPE_VERDE_ISLAND("CapeVerdeIsland"),
    @XmlEnumValue("CaracasLaPaz")
    CARACAS_LA_PAZ("CaracasLaPaz"),
    @XmlEnumValue("CasablancaMonrovia")
    CASABLANCA_MONROVIA("CasablancaMonrovia"),
    @XmlEnumValue("CentralAmerica")
    CENTRAL_AMERICA("CentralAmerica"),
    @XmlEnumValue("CentralTimeUSCanada")
    CENTRAL_TIME_US_CANADA("CentralTimeUSCanada"),
    @XmlEnumValue("ChennaiKolkataMumbaiNewDelhi")
    CHENNAI_KOLKATA_MUMBAI_NEW_DELHI("ChennaiKolkataMumbaiNewDelhi"),
    @XmlEnumValue("ChihuahuaLaPazMazatlan")
    CHIHUAHUA_LA_PAZ_MAZATLAN("ChihuahuaLaPazMazatlan"),
    @XmlEnumValue("Darwin")
    DARWIN("Darwin"),
    @XmlEnumValue("EasternTimeUSCanada")
    EASTERN_TIME_US_CANADA("EasternTimeUSCanada"),
    @XmlEnumValue("Ekaterinburg")
    EKATERINBURG("Ekaterinburg"),
    @XmlEnumValue("FijiKamchatkaMarshallIsland")
    FIJI_KAMCHATKA_MARSHALL_ISLAND("FijiKamchatkaMarshallIsland"),
    @XmlEnumValue("Greenland")
    GREENLAND("Greenland"),
    @XmlEnumValue("GreenwichMeanTimeDublinEdinburghLisbonLondon")
    GREENWICH_MEAN_TIME_DUBLIN_EDINBURGH_LISBON_LONDON("GreenwichMeanTimeDublinEdinburghLisbonLondon"),
    @XmlEnumValue("GuadalajaraMexicoCityMonterrey")
    GUADALAJARA_MEXICO_CITY_MONTERREY("GuadalajaraMexicoCityMonterrey"),
    @XmlEnumValue("GuamPortMoresby")
    GUAM_PORT_MORESBY("GuamPortMoresby"),
    @XmlEnumValue("HararePretoria")
    HARARE_PRETORIA("HararePretoria"),
    @XmlEnumValue("Hawaii")
    HAWAII("Hawaii"),
    @XmlEnumValue("HelsinkiKyivRigaSofiaTallinnVilnius")
    HELSINKI_KYIV_RIGA_SOFIA_TALLINN_VILNIUS("HelsinkiKyivRigaSofiaTallinnVilnius"),
    @XmlEnumValue("Hobart")
    HOBART("Hobart"),
    @XmlEnumValue("IndianaEast")
    INDIANA_EAST("IndianaEast"),
    @XmlEnumValue("InternationalDateLineWest")
    INTERNATIONAL_DATE_LINE_WEST("InternationalDateLineWest"),
    @XmlEnumValue("IrkutskUlaanBataar")
    IRKUTSK_ULAAN_BATAAR("IrkutskUlaanBataar"),
    @XmlEnumValue("IslamabadKarachiTashkent")
    ISLAMABAD_KARACHI_TASHKENT("IslamabadKarachiTashkent"),
    @XmlEnumValue("Jerusalem")
    JERUSALEM("Jerusalem"),
    @XmlEnumValue("Kabul")
    KABUL("Kabul"),
    @XmlEnumValue("Kathmandu")
    KATHMANDU("Kathmandu"),
    @XmlEnumValue("Krasnoyarsk")
    KRASNOYARSK("Krasnoyarsk"),
    @XmlEnumValue("KualaLumpurSingapore")
    KUALA_LUMPUR_SINGAPORE("KualaLumpurSingapore"),
    @XmlEnumValue("KuwaitRiyadh")
    KUWAIT_RIYADH("KuwaitRiyadh"),
    @XmlEnumValue("SolomonIslandNewCaledonia")
    SOLOMON_ISLAND_NEW_CALEDONIA("SolomonIslandNewCaledonia"),
    @XmlEnumValue("MidAtlantic")
    MID_ATLANTIC("MidAtlantic"),
    @XmlEnumValue("MidwayIslandAndSamoa")
    MIDWAY_ISLAND_AND_SAMOA("MidwayIslandAndSamoa"),
    @XmlEnumValue("MoscowStPetersburgVolgograd")
    MOSCOW_ST_PETERSBURG_VOLGOGRAD("MoscowStPetersburgVolgograd"),
    @XmlEnumValue("MountainTimeUSCanada")
    MOUNTAIN_TIME_US_CANADA("MountainTimeUSCanada"),
    @XmlEnumValue("Nairobi")
    NAIROBI("Nairobi"),
    @XmlEnumValue("Newfoundland")
    NEWFOUNDLAND("Newfoundland"),
    @XmlEnumValue("Nukualofa")
    NUKUALOFA("Nukualofa"),
    @XmlEnumValue("OsakaSapporoTokyo")
    OSAKA_SAPPORO_TOKYO("OsakaSapporoTokyo"),
    @XmlEnumValue("PacificTimeUSCanadaTijuana")
    PACIFIC_TIME_US_CANADA_TIJUANA("PacificTimeUSCanadaTijuana"),
    @XmlEnumValue("Perth")
    PERTH("Perth"),
    @XmlEnumValue("Rangoon")
    RANGOON("Rangoon"),
    @XmlEnumValue("Santiago")
    SANTIAGO("Santiago"),
    @XmlEnumValue("SarajevoSkopjeWarsawZagreb")
    SARAJEVO_SKOPJE_WARSAW_ZAGREB("SarajevoSkopjeWarsawZagreb"),
    @XmlEnumValue("Saskatchewan")
    SASKATCHEWAN("Saskatchewan"),
    @XmlEnumValue("Seoul")
    SEOUL("Seoul"),
    @XmlEnumValue("SriJayawardenepura")
    SRI_JAYAWARDENEPURA("SriJayawardenepura"),
    @XmlEnumValue("Taipei")
    TAIPEI("Taipei"),
    @XmlEnumValue("Tehran")
    TEHRAN("Tehran"),
    @XmlEnumValue("Vladivostok")
    VLADIVOSTOK("Vladivostok"),
    @XmlEnumValue("WestCentralAfrica")
    WEST_CENTRAL_AFRICA("WestCentralAfrica"),
    @XmlEnumValue("Yakutsk")
    YAKUTSK("Yakutsk");

    /** The exact schema string this constant is bound to. */
    private final String value;

    TimeZoneType(String v) {
        value = v;
    }

    /**
     * @return the schema (wire-format) string for this constant.
     */
    public String value() {
        return value;
    }

    /**
     * Resolves a schema string to its enum constant (case-sensitive).
     *
     * @param v the schema string, e.g. {@code "CentralTimeUSCanada"}
     * @return the matching constant
     * @throws IllegalArgumentException if {@code v} is not a legal value
     */
    public static TimeZoneType fromValue(String v) {
        for (TimeZoneType c: TimeZoneType.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        // FIX: include context in the message instead of echoing the bare value.
        throw new IllegalArgumentException("No TimeZoneType with value: " + v);
    }

}
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.configuredtargets;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.analysis.AnalysisUtils;
import com.google.devtools.build.lib.analysis.ConfiguredAspect;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.DuplicateException;
import com.google.devtools.build.lib.analysis.ExtraActionArtifactsProvider;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.RequiredConfigFragmentsProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoProviderMap;
import com.google.devtools.build.lib.analysis.TransitiveInfoProviderMapBuilder;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.Info;
import com.google.devtools.build.lib.packages.Provider;
import com.google.devtools.build.lib.packages.Provider.Key;
import com.google.devtools.build.lib.starlarkbuildapi.ActionApi;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import net.starlark.java.eval.Dict;
import net.starlark.java.eval.Printer;

/**
 * A single dependency with its configured target and aspects merged together.
 *
 * <p>This is an ephemeral object created only for the analysis of a single configured target. After
 * that configured target is analyzed, this is thrown away.
 *
 * <p>Provider lookups first consult {@link #nonBaseProviders} (aspect-supplied or merged
 * providers) and fall back to the {@link #base} rule's own providers.
 */
@Immutable
public final class MergedConfiguredTarget extends AbstractConfiguredTarget {
  /** The underlying rule's configured target; consulted last on provider lookups. */
  private final ConfiguredTarget base;
  /** The aspects applied to {@link #base}, in application order. */
  private final ImmutableList<ConfiguredAspect> aspects;

  /**
   * Providers that come from any source that isn't a pure pointer to the base rule's providers.
   *
   * <p>Examples include providers from aspects and merged providers that appear in both the base
   * rule and aspects.
   */
  private final TransitiveInfoProviderMap nonBaseProviders;

  private MergedConfiguredTarget(
      ConfiguredTarget base,
      Iterable<ConfiguredAspect> aspects,
      TransitiveInfoProviderMap nonBaseProviders) {
    super(base.getLabel(), base.getConfigurationKey());
    this.base = base;
    this.aspects = ImmutableList.copyOf(aspects);
    this.nonBaseProviders = nonBaseProviders;
  }

  /**
   * Returns the native provider of the given class, preferring the merged/aspect-supplied
   * instance over the base rule's own instance; null if neither has it.
   */
  @Override
  public <P extends TransitiveInfoProvider> P getProvider(Class<P> providerClass) {
    AnalysisUtils.checkProvider(providerClass);

    P provider = nonBaseProviders.getProvider(providerClass);
    if (provider == null) {
      provider = base.getProvider(providerClass);
    }

    return provider;
  }

  /**
   * Reports the Starlark-visible attribute names of this target: the base target's extra keys,
   * every legacy (String-keyed) provider held here, and the synthetic "actions" field.
   */
  @Override
  protected void addExtraStarlarkKeys(Consumer<String> result) {
    if (base instanceof AbstractConfiguredTarget) {
      ((AbstractConfiguredTarget) base).addExtraStarlarkKeys(result);
    }
    for (int i = 0; i < nonBaseProviders.getProviderCount(); i++) {
      Object classAt = nonBaseProviders.getProviderKeyAt(i);
      // Only String keys (legacy Starlark providers) are Starlark-visible names.
      if (classAt instanceof String) {
        result.accept((String) classAt);
      }
    }
    result.accept(AbstractConfiguredTarget.ACTIONS_FIELD_NAME);
  }

  /** Looks up a declared (modern, key-identified) Starlark provider; aspect providers win. */
  @Override
  protected Info rawGetStarlarkProvider(Provider.Key providerKey) {
    Info provider = nonBaseProviders.get(providerKey);
    if (provider == null) {
      provider = base.get(providerKey);
    }
    return provider;
  }

  /**
   * Looks up a legacy (string-identified) Starlark provider; the special "actions" key returns
   * the combined action list of all aspects plus the base rule.
   */
  @Override
  protected Object rawGetStarlarkProvider(String providerKey) {
    if (providerKey.equals(AbstractConfiguredTarget.ACTIONS_FIELD_NAME)) {
      ImmutableList.Builder<ActionAnalysisMetadata> actions = ImmutableList.builder();
      // Only expose actions which are StarlarkValues.
      // TODO(cparsons): Expose all actions to Starlark.
      for (ConfiguredAspect aspect : aspects) {
        actions.addAll(
            aspect.getActions().stream().filter(action -> action instanceof ActionApi).iterator());
      }
      if (base instanceof RuleConfiguredTarget) {
        actions.addAll(
            ((RuleConfiguredTarget) base)
                .getActions().stream().filter(action -> action instanceof ActionApi).iterator());
      }
      return actions.build();
    }
    Object provider = nonBaseProviders.get(providerKey);
    if (provider == null) {
      provider = base.get(providerKey);
    }
    return provider;
  }

  /**
   * Creates an instance based on a configured target and a set of aspects.
   *
   * <p>Output groups, extra-action, and required-config-fragment providers are merged across base
   * and aspects; every other aspect provider must be unique, otherwise a
   * {@link DuplicateException} is thrown.
   */
  public static ConfiguredTarget of(ConfiguredTarget base, Iterable<ConfiguredAspect> aspects)
      throws DuplicateException {
    if (Iterables.isEmpty(aspects)) {
      // If there are no aspects, don't bother with creating a proxy object
      return base;
    }

    TransitiveInfoProviderMapBuilder nonBaseProviders = new TransitiveInfoProviderMapBuilder();

    // Merge output group providers.
    OutputGroupInfo mergedOutputGroupInfo =
        OutputGroupInfo.merge(getAllOutputGroupProviders(base, aspects));
    if (mergedOutputGroupInfo != null) {
      nonBaseProviders.put(mergedOutputGroupInfo);
    }

    // Merge extra-actions provider.
    ExtraActionArtifactsProvider mergedExtraActionProviders = ExtraActionArtifactsProvider.merge(
        getAllProviders(base, aspects, ExtraActionArtifactsProvider.class));
    if (mergedExtraActionProviders != null) {
      nonBaseProviders.add(mergedExtraActionProviders);
    }

    // Merge required config fragments provider.
    List<RequiredConfigFragmentsProvider> requiredConfigFragmentProviders =
        getAllProviders(base, aspects, RequiredConfigFragmentsProvider.class);
    if (!requiredConfigFragmentProviders.isEmpty()) {
      nonBaseProviders.add(RequiredConfigFragmentsProvider.merge(requiredConfigFragmentProviders));
    }

    for (ConfiguredAspect aspect : aspects) {
      TransitiveInfoProviderMap providers = aspect.getProviders();
      for (int i = 0; i < providers.getProviderCount(); ++i) {
        Object providerKey = providers.getProviderKeyAt(i);
        // Skip the three provider kinds already merged above.
        if (OutputGroupInfo.STARLARK_CONSTRUCTOR.getKey().equals(providerKey)
            || ExtraActionArtifactsProvider.class.equals(providerKey)
            || RequiredConfigFragmentsProvider.class.equals(providerKey)) {
          continue;
        }

        // Keys come in three kinds: Class (native provider), String (legacy Starlark
        // provider), and Provider.Key (declared Starlark provider). Each kind checks for
        // duplicates against both the base target and previously-merged aspect providers.
        if (providerKey instanceof Class<?>) {
          @SuppressWarnings("unchecked")
          Class<? extends TransitiveInfoProvider> providerClass =
              (Class<? extends TransitiveInfoProvider>) providerKey;
          if (base.getProvider(providerClass) != null || nonBaseProviders.contains(providerClass)) {
            throw new DuplicateException("Provider " + providerKey + " provided twice");
          }
          nonBaseProviders.put(
              providerClass, (TransitiveInfoProvider) providers.getProviderInstanceAt(i));
        } else if (providerKey instanceof String) {
          String legacyId = (String) providerKey;
          if (base.get(legacyId) != null || nonBaseProviders.contains(legacyId)) {
            throw new DuplicateException("Provider " + legacyId + " provided twice");
          }
          nonBaseProviders.put(legacyId, providers.getProviderInstanceAt(i));
        } else if (providerKey instanceof Provider.Key) {
          Provider.Key key = (Key) providerKey;
          if (base.get(key) != null || nonBaseProviders.contains(key)) {
            throw new DuplicateException("Provider " + key + " provided twice");
          }
          nonBaseProviders.put((Info) providers.getProviderInstanceAt(i));
        }
      }
    }
    return new MergedConfiguredTarget(base, aspects, nonBaseProviders.build());
  }

  /** Collects the non-null {@link OutputGroupInfo} of the base target and every aspect. */
  private static ImmutableList<OutputGroupInfo> getAllOutputGroupProviders(
      ConfiguredTarget base, Iterable<ConfiguredAspect> aspects) {
    OutputGroupInfo baseProvider = OutputGroupInfo.get(base);
    ImmutableList.Builder<OutputGroupInfo> providers = ImmutableList.builder();
    if (baseProvider != null) {
      providers.add(baseProvider);
    }

    for (ConfiguredAspect configuredAspect : aspects) {
      OutputGroupInfo aspectProvider = OutputGroupInfo.get(configuredAspect);
      if (aspectProvider == null) {
        continue;
      }
      providers.add(aspectProvider);
    }
    return providers.build();
  }

  /** Collects the non-null providers of the given class from the base target and every aspect. */
  private static <T extends TransitiveInfoProvider> List<T> getAllProviders(
      ConfiguredTarget base, Iterable<ConfiguredAspect> aspects, Class<T> providerClass) {
    T baseProvider = base.getProvider(providerClass);
    List<T> providers = new ArrayList<>();
    if (baseProvider != null) {
      providers.add(baseProvider);
    }

    for (ConfiguredAspect configuredAspect : aspects) {
      T aspectProvider = configuredAspect.getProvider(providerClass);
      if (aspectProvider == null) {
        continue;
      }
      providers.add(aspectProvider);
    }
    return providers;
  }

  @Override
  public void repr(Printer printer) {
    printer.append("<merged target " + getLabel() + ">");
  }

  @Override
  public Dict<String, Object> getProvidersDict() {
    return ConfiguredTargetsUtil.getProvidersDict(this, nonBaseProviders);
  }

  @VisibleForTesting
  public ConfiguredTarget getBaseConfiguredTargetForTesting() {
    return base;
  }
}
package tektor.minecraft.talldoors.entities.drawbridge; import java.util.List; import tektor.minecraft.talldoors.TallDoorsBase; import tektor.minecraft.talldoors.items.Connector; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.ChatComponentText; import net.minecraft.util.DamageSource; import net.minecraft.world.World; public class DrawbridgeMachine extends Entity { public int orientation; // 28 public DrawbridgeBase base; public boolean powered; private double mX, mY, mZ; public double width2; // 23 public double height2; // 24 public double lon; // 25 public double rotation; // 26 public double spool; // 27 public DrawbridgeMachine(World par1World) { super(par1World); this.setSize(1f, 1f); this.ignoreFrustumCheck = true; this.powered = false; mX = 0; mY = 0; mZ = 0; width2 = height2 = lon = rotation = spool = 1; } public void setStuff(double width, double height, double depth, double rot, double spoolsize) { width2 = width; this.dataWatcher.updateObject(23, (int) width2); height2 = height; this.dataWatcher.updateObject(24, (int) height2); lon = depth; this.dataWatcher.updateObject(25, (int) lon); rotation = rot; this.dataWatcher.updateObject(26, (int) rot); spool = spoolsize; this.dataWatcher.updateObject(27, (int) spool); } public void onUpdate() { if (this.worldObj.isRemote) { width2 = this.dataWatcher.getWatchableObjectInt(23); height2 = this.dataWatcher.getWatchableObjectInt(24); lon = this.dataWatcher.getWatchableObjectInt(25); rotation = this.dataWatcher.getWatchableObjectInt(26); spool = this.dataWatcher.getWatchableObjectInt(27); orientation = this.dataWatcher.getWatchableObjectInt(28); } else { boolean power = false; if (this.orientation == 0) { for (int i = 0; i < this.width2; i++) { for (int k = 0; k < this.lon; k++) { if (worldObj.getBlockPowerInput((int) posX + k, (int) 
posY, (int) posZ + i) > 0) power = true; if (power) break; } if (power) break; } } else if (this.orientation == 1) { for (int i = 0; i < this.width2; i++) { for (int k = 0; k < this.lon; k++) { if (worldObj.getBlockPowerInput((int) posX - i, (int) posY, (int) posZ + k) > 0) power = true; if (power) break; } if (power) break; } } else if (this.orientation == 2) { for (int i = 0; i < this.width2; i++) { for (int k = 0; k < this.lon; k++) { if (worldObj.getBlockPowerInput((int) posX - k, (int) posY, (int) posZ - i) > 0) power = true; if (power) break; } if (power) break; } } else if (this.orientation == 3) { for (int i = 0; i < this.width2; i++) { for (int k = 0; k < this.lon; k++) { if (worldObj.getBlockPowerInput((int) posX + i, (int) posY, (int) posZ - k) > 0) power = true; if (power) break; } if (power) break; } } if (power && !powered) { if (base != null) { base.activate(); } powered = true; } else if (!power && powered){ powered = false; } } if (base == null) { @SuppressWarnings({ "unchecked", "static-access" }) List<DrawbridgeBase> list = (List<DrawbridgeBase>) worldObj .getEntitiesWithinAABB(DrawbridgeBase.class, boundingBox .getBoundingBox(mX - 1, mY - 1, mZ - 1, mX + 1, mY + 1, mZ + 1)); base = list.isEmpty() ? 
null : list.get(0); } setBoundsAt(posX, posY, posZ); } @Override protected void entityInit() { this.dataWatcher.addObject(28, 0); this.dataWatcher.addObject(23, 0); this.dataWatcher.addObject(24, 0); this.dataWatcher.addObject(25, 0); this.dataWatcher.addObject(26, 0); this.dataWatcher.addObject(27, 0); } @Override protected void readEntityFromNBT(NBTTagCompound nbt) { this.setOrientation(nbt.getInteger("orientation")); this.powered = nbt.getBoolean("power"); @SuppressWarnings("unchecked") List<DrawbridgeBase> list = (List<DrawbridgeBase>) worldObj .getEntitiesWithinAABB(DrawbridgeBase.class, AxisAlignedBB .getBoundingBox(nbt.getDouble("mX") - 1, nbt.getDouble("mY") - 1, nbt.getDouble("mZ") - 1, nbt.getDouble("mx") + 1, nbt.getDouble("mY") + 1, nbt.getDouble("mZ") + 1)); base = list.isEmpty() ? null : list.get(0); this.mX = nbt.getDouble("mX"); this.mY = nbt.getDouble("mY"); this.mZ = nbt.getDouble("mZ"); this.width2 = nbt.getDouble("width"); this.dataWatcher.updateObject(23, (int) width2); this.height2 = nbt.getDouble("height"); this.dataWatcher.updateObject(24, (int) height2); this.lon = nbt.getDouble("lon"); this.dataWatcher.updateObject(25, (int) lon); this.rotation = nbt.getDouble("rotation"); this.dataWatcher.updateObject(26, (int) rotation); this.spool = nbt.getDouble("spool"); this.dataWatcher.updateObject(27, (int) spool); } @Override protected void writeEntityToNBT(NBTTagCompound nbt) { nbt.setInteger("orientation", orientation); nbt.setBoolean("power", powered); if (base != null) { nbt.setDouble("mX", base.posX); nbt.setDouble("mY", base.posY); nbt.setDouble("mZ", base.posZ); } nbt.setDouble("width", width2); nbt.setDouble("height", height2); nbt.setDouble("lon", lon); nbt.setDouble("rotation", rotation); nbt.setDouble("spool", spool); } @Override public AxisAlignedBB getBoundingBox() { return this.boundingBox; } @Override public AxisAlignedBB getCollisionBox(Entity par1Entity) { return this.boundingBox; } @Override public boolean canBeCollidedWith() { 
return true; } public void setOrientation(int var24) { orientation = var24; this.dataWatcher.updateObject(28, var24); } @Override public void onCollideWithPlayer(EntityPlayer par1EntityPlayer) { } @Override public void setPositionAndRotation2(double par1, double par3, double par5, float par7, float par8, int par9) { this.setPosition(par1, par3, par5); this.setRotation(par7, par8); } @Override public void setPosition(double par1, double par3, double par5) { this.posX = par1; this.posY = par3; this.posZ = par5; setBoundsAt(par1, par3, par5); } public void setBoundsAt(double par1, double par3, double par5) { double f1 = this.height2; if (this.orientation == 0) { this.boundingBox.setBounds(par1, par3 - this.yOffset + this.ySize, par5, par1 + lon, par3 - this.yOffset + this.ySize + f1, par5 + width2); } else if (this.orientation == 1) { this.boundingBox.setBounds(par1 - width2 + 1, par3 - this.yOffset + this.ySize, par5, par1 + 1, par3 - this.yOffset + this.ySize + f1, par5 + lon); } else if (this.orientation == 2) { this.boundingBox.setBounds(par1 - lon + 1, par3 - this.yOffset + this.ySize, par5 - width2 + 1, par1 + 1, par3 - this.yOffset + this.ySize + f1, par5 + 1); } else if (this.orientation == 3) { this.boundingBox.setBounds(par1, par3 - this.yOffset + this.ySize, par5 - lon + 1, par1 + width2, par3 - this.yOffset + this.ySize + f1, par5 + 1); } } @Override public boolean interactFirst(EntityPlayer player) { if (!this.worldObj.isRemote) { ItemStack i = player.inventory.getCurrentItem(); if (i != null && i.getItem().equals(TallDoorsBase.connector) && ((Connector) player.inventory.getCurrentItem().getItem()).base != null) { if ((((Connector) player.inventory.getCurrentItem().getItem()).base.posY + ((Connector) player.inventory.getCurrentItem() .getItem()).base.lon - 1) < this.posY) { this.base = ((Connector) player.inventory.getCurrentItem() .getItem()).base; base.machine = this; base.setMachinePos(posX, posY, posZ); player.inventory.decrStackSize( 
player.inventory.currentItem, 1); } else { player.addChatMessage(new ChatComponentText("A voice tells you: The Machine has to be placed higher.")); } } if (player.inventory.getCurrentItem() != null && player.inventory.getCurrentItem().getItem().equals(TallDoorsBase.destructionHammer)) { func_110128_b(player); player.inventory.getCurrentItem().damageItem(1, player); return true; } } else { if (player.inventory.getCurrentItem() != null && player.inventory.getCurrentItem().getItem().equals(TallDoorsBase.destructionHammer)) { player.swingItem(); } } return true; } @Override public boolean attackEntityFrom(DamageSource par1DamageSource, float par2) { return false; } public void func_110128_b(Entity par1Entity) { if (par1Entity instanceof EntityPlayer) { EntityPlayer entityplayer = (EntityPlayer) par1Entity; this.setDead(); if (entityplayer.capabilities.isCreativeMode) { return; } } this.entityDropItem(new ItemStack(TallDoorsBase.drawbridge, 1, 1), 0.0F); } }
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.lib.jdbc.multithread; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.HashMultimap; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import com.google.common.collect.SortedSetMultimap; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.impl.Utils; import com.streamsets.pipeline.lib.jdbc.multithread.util.OffsetQueryUtil; import com.streamsets.pipeline.stage.origin.jdbc.table.PartitioningMode; import com.streamsets.pipeline.stage.origin.jdbc.table.TableJdbcSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayDeque; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.SortedSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; /** * Helper class for maintaining and organizing workable tables to threads */ public final class MultithreadedTableProvider { private static final Logger LOG = LoggerFactory.getLogger(MultithreadedTableProvider.class); private 
Map<String, TableContext> tableContextMap; private final Multimap<String, TableContext> remainingSchemasToTableContexts = HashMultimap.create(); private final Multimap<String, TableContext> completedSchemasToTableContexts = HashMultimap.create(); private final LinkedList<TableRuntimeContext> sharedAvailableTablesList; private final Set<TableContext> tablesWithNoMoreData; private Map<Integer, Integer> threadNumToMaxTableSlots; private final int numThreads; private final BatchTableStrategy batchTableStrategy; private final TableMaxOffsetValueUpdater tableMaxOffsetValueUpdater; private Queue<String> sortedTableOrder; private final ThreadLocal<Deque<TableRuntimeContext>> ownedTablesQueue = ThreadLocal.withInitial(LinkedList::new); private final ConcurrentMap<TableContext, Integer> maxPartitionWithDataPerTable = Maps.newConcurrentMap(); private final SortedSetMultimap<TableContext, TableRuntimeContext> activeRuntimeContexts = TableRuntimeContext.buildSortedPartitionMap(); private final Object partitionStateLock = activeRuntimeContexts; private final Set<TableRuntimeContext> removedPartitions = Sets.newConcurrentHashSet(); private volatile boolean isNoMoreDataEventGeneratedAlready = false; public MultithreadedTableProvider( Map<String, TableContext> tableContextMap, Queue<String> sortedTableOrder, Map<Integer, Integer> threadNumToMaxTableSlots, int numThreads, BatchTableStrategy batchTableStrategy, TableMaxOffsetValueUpdater tableMaxOffsetValueUpdater ) { this.tableContextMap = new ConcurrentHashMap<>(tableContextMap); initializeRemainingSchemasToTableContexts(); this.numThreads = numThreads; this.batchTableStrategy = batchTableStrategy; this.tableMaxOffsetValueUpdater = tableMaxOffsetValueUpdater; final Map<String, Integer> tableNameToOrder = new HashMap<>(); int order = 1; for (String tableName : sortedTableOrder) { tableNameToOrder.put(tableName, order++); } sharedAvailableTablesList = new LinkedList<>(); this.sortedTableOrder = new ArrayDeque<>(sortedTableOrder); // 
always construct initial values for partition queue based on table contexts // if stored offsets come into play, those will be handled by a subsequent invocation generateInitialPartitionsInSharedQueue(false, null, null); this.tablesWithNoMoreData = Sets.newConcurrentHashSet(); this.threadNumToMaxTableSlots = threadNumToMaxTableSlots; } private void initializeRemainingSchemasToTableContexts() { for (final TableContext tableContext : this.tableContextMap.values()) { remainingSchemasToTableContexts.put(tableContext.getSchema(), tableContext); } completedSchemasToTableContexts.clear(); } public void setTableContextMap(Map<String, TableContext> tableContextMap, Queue<String> sortedTableOrder) { if (!tableContextMap.equals(this.tableContextMap)) { this.tableContextMap = new ConcurrentHashMap<>(tableContextMap); final Map<String, Integer> tableNameToOrder = new HashMap<>(); int order = 1; for (String tableName : sortedTableOrder) { tableNameToOrder.put(tableName, order++); } this.sortedTableOrder = new ArrayDeque<>(sortedTableOrder); // always construct initial values for partition queue based on table contexts // if stored offsets come into play, those will be handled by a subsequent invocation generateInitialPartitionsInSharedQueue(false, null, null); } } public Set<String> initializeFromV1Offsets(Map<String, String> offsets) throws StageException { // v1 offsets map qualified table names to offset column positions LOG.info("Upgrading offsets from v1 to v2; logging current offsets now"); offsets.forEach((t, v) -> LOG.info("{} -> {}", t, v)); final Set<String> offsetKeysToRemove = new HashSet<>(); SortedSetMultimap<TableContext, TableRuntimeContext> v1Offsets = TableRuntimeContext.initializeAndUpgradeFromV1Offsets( tableContextMap, offsets, offsetKeysToRemove ); generateInitialPartitionsInSharedQueue(true, v1Offsets, null); initializeMaxPartitionWithDataPerTable(offsets); return offsetKeysToRemove; } public void initializeFromV2Offsets( Map<String, String> offsets, 
Map<String, String> newCommitOffsets ) throws StageException { final Set<TableContext> excludeTables = new HashSet<>(); SortedSetMultimap<TableContext, TableRuntimeContext> v2Offsets = TableRuntimeContext.buildPartitionsFromStoredV2Offsets( tableContextMap, offsets, excludeTables, newCommitOffsets ); handlePartitioningTurnedOffOrOn(v2Offsets); generateInitialPartitionsInSharedQueue(true, v2Offsets, excludeTables); initializeMaxPartitionWithDataPerTable(newCommitOffsets); } /** * Checks whether any tables have had partitioning turned off or not, and updates the partition map appropriately * * @param reconstructedPartitions the reconstructed partitions (may be modified) */ private void handlePartitioningTurnedOffOrOn( SortedSetMultimap<TableContext, TableRuntimeContext> reconstructedPartitions ) { for (TableContext tableContext : reconstructedPartitions.keySet()) { final SortedSet<TableRuntimeContext> partitions = reconstructedPartitions.get(tableContext); final TableRuntimeContext lastPartition = partitions.last(); final TableContext sourceTableContext = lastPartition.getSourceTableContext(); Utils.checkState( sourceTableContext.equals(tableContext), String.format( "Source table context for %s should match TableContext map key of %s", lastPartition.getDescription(), tableContext.getQualifiedName() ) ); final boolean partitioningTurnedOff = lastPartition.isPartitioned() && sourceTableContext.getPartitioningMode() == PartitioningMode.DISABLED; final boolean partitioningTurnedOn = !lastPartition.isPartitioned() && sourceTableContext.isPartitionable() && sourceTableContext.getPartitioningMode() != PartitioningMode.DISABLED; if (!partitioningTurnedOff && !partitioningTurnedOn) { continue; } final Map<String, String> nextStartingOffsets = new HashMap<>(); final Map<String, String> nextMaxOffsets = new HashMap<>(); final int newPartitionSequence = lastPartition.getPartitionSequence() > 0 ? 
lastPartition.getPartitionSequence() + 1 : 1; if (partitioningTurnedOff) { LOG.info( "Table {} has switched from partitioned to non-partitioned; partition sequence {} will be the last (with" + " no max offsets)", sourceTableContext.getQualifiedName(), newPartitionSequence ); lastPartition.getPartitionOffsetStart().forEach( (col, off) -> { String basedOnStartOffset = lastPartition.generateNextPartitionOffset(col, off); nextStartingOffsets.put(col, basedOnStartOffset); } ); } else if (partitioningTurnedOn) { lastPartition.getPartitionOffsetStart().forEach( (col, off) -> { String basedOnStoredOffset = lastPartition.getInitialStoredOffsets().get(col); nextStartingOffsets.put(col, basedOnStoredOffset); } ); nextStartingOffsets.forEach( (col, off) -> nextMaxOffsets.put(col, lastPartition.generateNextPartitionOffset(col, off)) ); if (!reconstructedPartitions.remove(sourceTableContext, lastPartition)) { throw new IllegalStateException(String.format( "Failed to remove partition %s for table %s in switching partitioning from off to on", lastPartition.getDescription(), sourceTableContext.getQualifiedName() )); } LOG.info( "Table {} has switched from non-partitioned to partitioned; using last stored offsets as the starting" + " offsets for the new partition {}", sourceTableContext.getQualifiedName(), newPartitionSequence ); } final TableRuntimeContext nextPartition = new TableRuntimeContext( sourceTableContext, lastPartition.isUsingNonIncrementalLoad(), (lastPartition.isPartitioned() && !partitioningTurnedOff) || partitioningTurnedOn, newPartitionSequence, nextStartingOffsets, nextMaxOffsets ); reconstructedPartitions.put(sourceTableContext, nextPartition); } } @VisibleForTesting void generateInitialPartitionsInSharedQueue( boolean fromStoredOffsets, Multimap<TableContext, TableRuntimeContext> reconstructedPartitions, Set<TableContext> excludeTables ) { sharedAvailableTablesList.clear(); activeRuntimeContexts.clear(); for (String qualifiedTableName : sortedTableOrder) { 
//create the initial partition for each table final TableContext tableContext = tableContextMap.get(qualifiedTableName); if (excludeTables != null && excludeTables.contains(tableContext)) { LOG.debug("Not adding table {} to table provider since it was excluded", qualifiedTableName); // Since the table is ignored, we have to set the no-more-data like events as if the table was already transferred tablesWithNoMoreData.add(tableContext); remainingSchemasToTableContexts.remove(tableContext.getSchema(), tableContext); completedSchemasToTableContexts.put(tableContext.getSchema(), tableContext); continue; } Collection<TableRuntimeContext> partitions = null; if (fromStoredOffsets) { partitions = reconstructedPartitions.get(tableContext); } if (partitions == null || partitions.isEmpty()) { partitions = Collections.singletonList(TableRuntimeContext.createInitialPartition(tableContext)); } partitions.forEach(sharedAvailableTablesList::add); activeRuntimeContexts.putAll(tableContext, partitions); } } @VisibleForTesting void initializeMaxPartitionWithDataPerTable(Map<String, String> offsets) { Map<TableContext, Integer> maxWithData = new HashMap<>(); for (TableContext table : activeRuntimeContexts.keySet()) { final SortedSet<TableRuntimeContext> partitions = activeRuntimeContexts.get(table); TableRuntimeContext firstPartition = partitions.first(); // as a baseline, the partition sequence one lower than the minimum reconstructed partition has had data // since otherwise, it would still be part of the stored data if (firstPartition.isPartitioned()) { maxWithData.put(table, firstPartition.getPartitionSequence() - 1); } for (TableRuntimeContext partition : partitions) { if (offsets.containsKey(partition.getOffsetKey())) { final Map<String, String> storedOffsets = OffsetQueryUtil.getColumnsToOffsetMapFromOffsetFormat( offsets.get(partition.getOffsetKey()) ); final Map<String, String> startOffsets = partition.getPartitionOffsetStart(); for (Map.Entry<String, String> storedOffsetEntry 
: storedOffsets.entrySet()) { String offsetCol = storedOffsetEntry.getKey(); String storedOffsetVal = storedOffsetEntry.getValue(); if (startOffsets.get(offsetCol) != null && !startOffsets.get(offsetCol).equals(storedOffsetVal)) { // if the stored offset value is not equal to the starting offset value, it must necessarily be greater // (since records are processed in increasing order w.r.t. the offset column) // therefore, we know that progress has been made in this partition in a previous run if (maxWithData.containsKey(table)) { int partitionSequence = partition.getPartitionSequence(); if (maxWithData.get(table) < partitionSequence) { maxWithData.put(table, partitionSequence); } } } } } } } maxPartitionWithDataPerTable.putAll(maxWithData); } @VisibleForTesting Deque<TableRuntimeContext> getOwnedTablesQueue() { return ownedTablesQueue.get(); } private String getCurrentThreadName() { return Thread.currentThread().getName(); } @VisibleForTesting Multimap<String, TableContext> getRemainingSchemasToTableContexts() { return remainingSchemasToTableContexts; } public Multimap<String, TableContext> getCompletedSchemasToTableContexts() { return completedSchemasToTableContexts; } @VisibleForTesting void offerToOwnedTablesQueue(TableRuntimeContext acquiredTableName, int threadNumber) { getOwnedTablesQueue().offerLast(acquiredTableName); if (LOG.isTraceEnabled()) { LOG.trace("Thread '{}' has acquired table '{}'", getCurrentThreadName(), acquiredTableName.getDescription()); } } /** * Basically acquires more tables for the current thread to work on. 
* The maximum a thread can hold is upper bounded to the * value the thread number was allocated in {@link #threadNumToMaxTableSlots} * If there are no tables currently owned make a blocking call to {@link #sharedAvailableTablesList} * else simply poll {@link #sharedAvailableTablesList} and it to the {@link #ownedTablesQueue} */ @VisibleForTesting void acquireTableAsNeeded(int threadNumber) throws InterruptedException { if (!getOwnedTablesQueue().isEmpty() && batchTableStrategy == BatchTableStrategy.SWITCH_TABLES) { final TableRuntimeContext lastOwnedPartition = getOwnedTablesQueue().pollLast(); if (getTableContextMap().containsValue(lastOwnedPartition.getSourceTableContext())) { sharedAvailableTablesList.add(lastOwnedPartition); } TableContext lastOwnedTable = lastOwnedPartition.getSourceTableContext(); // need to cycle off all partitions from the same table to the end of the queue TableRuntimeContext first = sharedAvailableTablesList.peekFirst(); while (first != null && first.getSourceTableContext().equals(lastOwnedTable) && !first.equals(lastOwnedPartition)) { if (LOG.isDebugEnabled()) { LOG.debug( "Moving partition {} to end of shared queue to comply with BatchTableStrategy of {}", first.getDescription(), batchTableStrategy.getLabel() ); } // move item from head to tail of list TableRuntimeContext toMove = sharedAvailableTablesList.pollFirst(); sharedAvailableTablesList.add(toMove); // Get the new head of the queue first = sharedAvailableTablesList.peekFirst(); } } if (getOwnedTablesQueue().isEmpty()) { TableRuntimeContext head = sharedAvailableTablesList.pollFirst(); if (head != null) { offerToOwnedTablesQueue(head, threadNumber); } } partitionFirstSharedQueueItemIfNeeded(); } /** * <p>Examines the first item ("head") im the shared partition queue, and adds a new partition if appropriate</p> * <p>A new partition will be created if the number of partitions for the head item's table is still less * than the maximum, and that table itself is partitionable</p> */ 
@VisibleForTesting
void partitionFirstSharedQueueItemIfNeeded() {
  // NOTE(review): despite the javadoc referring to the shared queue, this peeks the thread's
  // OWNED queue head — confirm which queue is intended.
  final TableRuntimeContext headPartition = getOwnedTablesQueue().peek();
  if (headPartition != null) {
    // all partition bookkeeping is guarded by partitionStateLock
    synchronized (partitionStateLock) {
      keepPartitioningIfNeeded(headPartition);
    }
  } else if (LOG.isTraceEnabled()) {
    LOG.trace("No item at head of shared partition queue");
  }
}

/**
 * Starting from the given partition, keeps creating successor partitions — registering each in
 * {@code activeRuntimeContexts} and offering it to the shared queue — for as long as
 * {@link #isNewPartitionAllowed} permits. Stops when a new partition is disallowed, cannot be
 * created (offsets not yet captured), or is rejected by the shared list.
 */
@VisibleForTesting
void keepPartitioningIfNeeded(TableRuntimeContext partition) {
  TableRuntimeContext current = partition;
  while (current != null && isNewPartitionAllowed(current)) {
    TableRuntimeContext newPartition = createNextPartition(current);
    if (newPartition != null) {
      LOG.info("Adding new partition to shared queue: {}", newPartition.getDescription());
      activeRuntimeContexts.put(newPartition.getSourceTableContext(), newPartition);
      // if the shared list rejects the partition, stop partitioning altogether
      if (!sharedAvailableTablesList.add(newPartition)) {
        return;
      }
      current = newPartition;
    } else {
      // no successor could be created; terminate the loop
      current = null;
    }
  }
}

/**
 * Decides whether a successor partition may be created after the given partition.
 * Returns false when any of the following hold: the partition/table is not partitionable (or
 * partitioning is disabled); the table has already been marked exhausted this iteration; all
 * offsets are beyond the table's max values and too many partitions have passed since data was
 * last seen; the table already has the maximum number of active partitions; or the given
 * partition is not the last currently active partition for its table.
 */
@VisibleForTesting
boolean isNewPartitionAllowed(TableRuntimeContext partition) {
  final TableContext tableContext = partition.getSourceTableContext();
  if (!partition.isPartitioned()
      && (tableContext.getPartitioningMode() == PartitioningMode.DISABLED || !tableContext.isPartitionable())) {
    if (LOG.isDebugEnabled()) {
      // NOTE(review): missing space between "is" and "not" in the concatenated message below
      LOG.debug(
          "Cannot create new partition for ({}) because it is not partitionable, and the underlying table is"
              + "not partitionable, or it has been disabled",
          partition.getDescription()
      );
    }
    return false;
  }
  if (tablesWithNoMoreData.contains(tableContext)) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Cannot create new partition for ({}) because the table has already been marked exhausted in this iteration",
          partition.getDescription()
      );
    }
    return false;
  }
  // have all offset columns advanced past the table's recorded maximum values?
  final boolean maxOffsetValuesPassed = TableContextUtil.allOffsetsBeyondMaxValues(
      tableContext,
      partition.getPartitionOffsetStart()
  );
  final int maxPartitionWithData = getMaxPartitionWithData(tableContext);
  if (maxOffsetValuesPassed
      && partition.getPartitionSequence() - maxPartitionWithData > maxNumActivePartitions(tableContext)) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Cannot create new partition for ({}) because there has been no data seen since partition {}",
          partition.getDescription(),
          maxPartitionWithData
      );
    }
    return false;
  }
  // check whether this particular table already has the maximum number of allowed partitions
  final SortedSet<TableRuntimeContext> runtimeContexts = activeRuntimeContexts.get(tableContext);
  final int maxNumActivePartitions = maxNumActivePartitions(tableContext);
  if (maxOffsetValuesPassed && runtimeContexts.size() >= maxNumActivePartitions) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Cannot create new partition for ({}) because the table has already reached the maximum allowed number of"
              + " active partitions ({})",
          partition.getDescription(),
          maxNumActivePartitions
      );
    }
    return false;
  }
  if (runtimeContexts.size() > 0 && !runtimeContexts.last().equals(partition)) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Can only create new partition for ({}) if it is the last currently active partition for the table",
          partition.getDescription()
      );
    }
    return false;
  }
  return true;
}

/** Returns the highest partition sequence known to have produced data for the table, or 0 if none. */
@VisibleForTesting
int getMaxPartitionWithData(TableContext tableContext) {
  final Integer maxPartitionWithDataObj = maxPartitionWithDataPerTable.get(tableContext);
  return maxPartitionWithDataObj != null ? maxPartitionWithDataObj : 0;
}

/**
 * Maximum number of simultaneously active partitions for the given table: the table's configured
 * value when positive, otherwise {@code numThreads * 2}.
 */
@VisibleForTesting
int maxNumActivePartitions(TableContext tableContext) {
  if (tableContext.getMaxNumActivePartitions() > 0) {
    return tableContext.getMaxNumActivePartitions();
  } else {
    // numThreads * 2 gives enough of a cushion such that enough new partitions can be created so that threads
    // always have something to work on, while also maintaining previous partitions that may not yet have
    // finished and been removed from the active context
    return numThreads * 2;
  }
}

/**
 * Walks the active partitions for the given partition's table, pruning partitions that were
 * already marked "no more data" (when it is safe to remove them), and determines whether the
 * table as a whole is now exhausted.
 *
 * @return true if the table can be considered exhausted (the given partition is the last active
 *     one and had already been marked no-more-data once before)
 */
@VisibleForTesting
boolean removePartitionIfNeeded(TableRuntimeContext partition) {
  final TableContext sourceTableContext = partition.getSourceTableContext();
  synchronized (partitionStateLock) {
    boolean tableExhausted = false;
    final SortedSet<TableRuntimeContext> activeContexts = activeRuntimeContexts.get(sourceTableContext);
    final Iterator<TableRuntimeContext> activeContextIter = activeContexts.iterator();
    int numActivePartitions = 0;
    int positionsFromEnd = activeContexts.size();
    while (activeContextIter.hasNext()) {
      final TableRuntimeContext thisPartition = activeContextIter.next();
      if (thisPartition.equals(partition)) {
        final int maxPartitionWithData = getMaxPartitionWithData(partition.getSourceTableContext());
        // update max offset values for table, in case new rows have been added since initialization
        tableMaxOffsetValueUpdater.updateMaxOffsetsForTable(sourceTableContext);
        final boolean lastPartition =
            // no currently active partitions for the table
            numActivePartitions == 0
            // and the number of partitions since we last saw data
            && partition.getPartitionSequence() - maxPartitionWithData
            // is greater than or equal to the max number of active partitions minus 1
            >= (maxNumActivePartitions(sourceTableContext) - 1)
            && TableContextUtil.allOffsetsBeyondMaxValues(
                sourceTableContext,
                partition.getPartitionOffsetStart()
            )
        ;
        if (!activeContextIter.hasNext()
            && thisPartition.isMarkedNoMoreData()
            && (!partition.isPartitioned() || lastPartition)) {
          // this is the last partition, and was already marked no more data once
          // now, it's being marked no more data again, so we can safely assume that the table is now exhausted
          tableExhausted = true;
        }
        break;
      } else if (thisPartition.isMarkedNoMoreData() && activeContextIter.hasNext()) {
        // this partition has already been marked as no more data once, so it can be removed now
        // but only if there is at least one more after it, since we want to keep at least one for every table
        if (positionsFromEnd > maxNumActivePartitions(sourceTableContext)
            || thisPartition.getPartitionSequence() < getMaxPartitionWithData(thisPartition.getSourceTableContext())) {
          activeContextIter.remove();
          removedPartitions.add(thisPartition);
          if (!sharedAvailableTablesList.remove(thisPartition)) {
            if (LOG.isDebugEnabled()) {
              LOG.debug(
                  "Failed to remove partition {} from sharedAvailableTablesList; it may be owned by another thread",
                  thisPartition.getDescription()
              );
            }
          }
        }
        // else this partition will simply NOT be re-added to the shared queue in the releaseOwnedTable method
      } else {
        numActivePartitions++;
      }
      positionsFromEnd--;
    }
    return tableExhausted;
  }
}

/**
 * Return the next table to work on for the current thread.
 * Deque the current element from head of the queue and put it back at the tail of the queue.
 * NOTE(review): the original javadoc claimed "Will not return null", but this returns the result
 * of pollFirst on the owned queue, which is null when no partition is available — callers
 * should be prepared for a null return.
 */
public TableRuntimeContext nextTable(int threadNumber) throws InterruptedException {
  synchronized (partitionStateLock) {
    acquireTableAsNeeded(threadNumber);
    final TableRuntimeContext partition = getOwnedTablesQueue().pollFirst();
    if (partition != null) {
      // cycle the partition to the tail so releaseOwnedTable can later pop it from there
      offerToOwnedTablesQueue(partition, threadNumber);
    }
    return partition;
  }
}

/**
 * Creates the successor partition of {@code lastContext}, or returns null when
 * {@link TableRuntimeContext#createNextPartition} cannot (offsets not yet captured for every
 * offset column).
 */
@VisibleForTesting
TableRuntimeContext createNextPartition(TableRuntimeContext lastContext) {
  TableRuntimeContext runtimeContext = TableRuntimeContext.createNextPartition(lastContext);
  TableContext tableContext = lastContext.getSourceTableContext();
  if (runtimeContext != null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Creating next partition (number {}) for thread '{}' to work on table '{}'",
          runtimeContext.getPartitionSequence(),
          getCurrentThreadName(),
          tableContext.getQualifiedName()
      );
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace(
          "Offsets for table '{}' partition {}: start=({}), max=({})",
          tableContext.getQualifiedName(),
          runtimeContext.getPartitionSequence(),
          runtimeContext.getPartitionOffsetStart(),
          runtimeContext.getPartitionOffsetEnd()
      );
    }
  } else {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Could not create next partition (after number {}) for thread '{}' to work on for table '{}' because"
              + " offsets ({}) have not yet been captured for every offset column ({})",
          lastContext.getPartitionSequence(),
          getCurrentThreadName(),
          tableContext.getQualifiedName(),
          lastContext.getPartitionOffsetStart(),
          tableContext.getOffsetColumns()
      );
    }
  }
  return runtimeContext;
}

/**
 * Releases ownership of the given partition — which must be at the tail of this thread's owned
 * queue — and, when appropriate, returns it to the shared queue for other threads to pick up.
 * NOTE(review): the {@code threadNumber} parameter is unused in this method body.
 */
@VisibleForTesting
void releaseOwnedTable(TableRuntimeContext tableRuntimeContext, int threadNumber) {
  final TableContext sourceContext = tableRuntimeContext.getSourceTableContext();
  String tableName = sourceContext.getQualifiedName();
  LOG.trace(
      "Thread '{}' has released ownership for partition '{}'",
      getCurrentThreadName(),
      tableRuntimeContext
  );
  //Remove the last element (because we put the current processing element at the tail of dequeue)
  TableRuntimeContext removedPartition = getOwnedTablesQueue().pollLast();
  Utils.checkState(
      tableRuntimeContext.equals(removedPartition),
      Utils.format(
          "Expected table to be remove '{}', Found '{}' at the last of the queue",
          tableName,
          removedPartition.getDescription()
      )
  );
  synchronized (partitionStateLock) {
    boolean containsActiveEntry = activeRuntimeContexts.containsEntry(sourceContext, removedPartition);
    if (containsActiveEntry || sharedAvailableTablesList.isEmpty()) {
      // non-incremental partitions are processed once and never re-queued
      if (tableRuntimeContext.isUsingNonIncrementalLoad()) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Not re-adding table {} because it is non-incremental", removedPartition.getDescription());
        }
        return;
      }
      sharedAvailableTablesList.add(removedPartition);
      if (!containsActiveEntry) {
        activeRuntimeContexts.put(sourceContext, removedPartition);
      }
    } else {
      if (LOG.isDebugEnabled()) {
        LOG.debug(
            "Not adding partition '{}' back to the shared queue because it was already removed as an active"
                + " context, and the queue is not empty",
            removedPartition.getDescription()
        );
      }
    }
  }
}

/** Convenience overload without the table/schema-finished out-parameters. */
void reportDataOrNoMoreData(
    TableRuntimeContext tableRuntimeContext,
    int recordCount,
    int batchSize,
    boolean resultSetEndReached
) {
  reportDataOrNoMoreData(tableRuntimeContext, recordCount, batchSize, resultSetEndReached, null, null, null);
}

/**
 * Each {@link TableJdbcRunnable} worker thread can call this api to update
 * if there is data/no more data on the current table.
 *
 * @param tableFinished set to true when the table was newly marked exhausted (may be null)
 * @param schemaFinished set to true when the table's whole schema became exhausted (may be null)
 * @param schemaFinishedTables populated with the finished schema's table names (may be null)
 */
public void reportDataOrNoMoreData(
    TableRuntimeContext tableRuntimeContext,
    int recordCount,
    int batchSize,
    boolean resultSetEndReached,
    AtomicBoolean tableFinished,
    AtomicBoolean schemaFinished,
    List<String> schemaFinishedTables
) {
  final TableContext sourceContext = tableRuntimeContext.getSourceTableContext();
  // When we see a table with data, we mark isNoMoreDataEventGeneratedAlready to false
  // so we can generate event again if we don't see data from all tables.
  if(recordCount > 0) {
    isNoMoreDataEventGeneratedAlready = false;
    tablesWithNoMoreData.remove(tableRuntimeContext.getSourceTableContext());
    remainingSchemasToTableContexts.put(sourceContext.getSchema(), sourceContext);
    completedSchemasToTableContexts.remove(sourceContext.getSchema(), sourceContext);
  }
  // we need to account for the activeRuntimeContexts here
  // if there are still other active contexts in process, then this should do "nothing"
  // if there are not other contexts, we need to figure out what the highest offset completed by the last batch was
  final boolean noMoreData = recordCount == 0 || resultSetEndReached;
  if (noMoreData) {
    tableRuntimeContext.setMarkedNoMoreData(true);
  }
  if (recordCount > 0) {
    // record the partition sequence that most recently produced data for this table
    maxPartitionWithDataPerTable.put(sourceContext, tableRuntimeContext.getPartitionSequence());
  }
  boolean tableExhausted = removePartitionIfNeeded(tableRuntimeContext);
  if (noMoreData) {
    if (tableExhausted) {
      synchronized (this) {
        if (LOG.isDebugEnabled()) {
          LOG.debug(
              "Table {} exhausted",
              sourceContext.getQualifiedName()
          );
        }
        final boolean newlyFinished = tablesWithNoMoreData.add(sourceContext);
        if (newlyFinished && tableFinished != null) {
          tableFinished.set(true);
        }
        // move the table from the "remaining" to the "completed" schema multimap
        final boolean remainingSchemaChanged =
            remainingSchemasToTableContexts.remove(sourceContext.getSchema(), sourceContext);
        completedSchemasToTableContexts.put(sourceContext.getSchema(), sourceContext);
        if (remainingSchemaChanged
            && remainingSchemasToTableContexts.get(sourceContext.getSchema()).isEmpty()
            && schemaFinished != null) {
          schemaFinished.set(true);
          if (schemaFinishedTables != null) {
            completedSchemasToTableContexts.get(sourceContext.getSchema()).forEach(
                t -> schemaFinishedTables.add(t.getTableName())
            );
          }
        }
      }
    }
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace(
        "Just released table {}; Number of Tables With No More Data {}",
        tableRuntimeContext.getDescription(),
        tablesWithNoMoreData.size()
    );
  }
}

/**
 * Used by the main thread {@link TableJdbcSource} to check whether all
 * tables have marked no more data.
 * Generates the event only if we haven't generated a no-more-data event already,
 * or we have seen a table with records after we generated an event before.
 */
public synchronized boolean shouldGenerateNoMoreDataEvent() {
  boolean noMoreData = (
      !isNoMoreDataEventGeneratedAlready
      && tablesWithNoMoreData.size() == tableContextMap.size());
  if (noMoreData) {
    // remember that the event fired so it is not generated again until new data arrives
    isNoMoreDataEventGeneratedAlready = true;
  }
  return noMoreData;
}

/** Atomically snapshots and clears the list of partitions removed since the last call. */
public List<TableRuntimeContext> getAndClearRemovedPartitions() {
  synchronized (partitionStateLock) {
    final LinkedList<TableRuntimeContext> returnPartitions = new LinkedList<>(removedPartitions);
    removedPartitions.clear();
    return returnPartitions;
  }
}

@VisibleForTesting
public Map<String, TableContext> getTableContextMap() {
  return tableContextMap;
}

@VisibleForTesting
LinkedList<TableRuntimeContext> getSharedAvailableTablesList() {
  return sharedAvailableTablesList;
}

@VisibleForTesting
Set<TableContext> getTablesWithNoMoreData() {
  return tablesWithNoMoreData;
}

@VisibleForTesting
Map<Integer, Integer> getThreadNumToMaxTableSlots() {
  return threadNumToMaxTableSlots;
}

@VisibleForTesting
int getNumThreads() {
  return numThreads;
}

@VisibleForTesting
public ConcurrentMap<TableContext, Integer> getMaxPartitionWithDataPerTable() {
  return maxPartitionWithDataPerTable;
}

@VisibleForTesting
public SortedSetMultimap<TableContext, TableRuntimeContext> getActiveRuntimeContexts() {
  return activeRuntimeContexts;
}

/** Debug helper: renders the owned queue, shared queue, and active contexts on separate lines. */
@VisibleForTesting
String getAllState() {
  final StringBuilder sb = new StringBuilder();
  sb.append("owned: ");
  sb.append(getOwnedQueueState());
  sb.append("\nshared: ");
  sb.append(getSharedQueueState());
  sb.append("\nactive: ");
  sb.append(getActiveContextsState());
  return sb.toString();
}

/** Debug helper: comma-separated short descriptions of this thread's owned partitions. */
@VisibleForTesting
String getOwnedQueueState() {
  final StringBuilder sb = new StringBuilder();
  for (TableRuntimeContext item : getOwnedTablesQueue()) {
    if (sb.length() > 0) {
      sb.append(",");
    }
    sb.append(item.getShortDescription());
  }
  return sb.toString();
}

/** Debug helper: comma-separated short descriptions of the shared (available) partitions. */
@VisibleForTesting
String getSharedQueueState() {
  final StringBuilder sb = new StringBuilder();
  for (TableRuntimeContext item : sharedAvailableTablesList) {
    if (sb.length() > 0) {
      sb.append(",");
    }
    sb.append(item.getShortDescription());
  }
  return sb.toString();
}

/** Debug helper: per-table listing of active partition sequences ("*" = marked no-more-data). */
@VisibleForTesting
String getActiveContextsState() {
  final StringBuilder sb = new StringBuilder();
  for (TableContext table : activeRuntimeContexts.keySet()) {
    if (sb.length() > 0) {
      sb.append("\n");
    }
    sb.append(table.getQualifiedName());
    sb.append(": [");
    boolean seen = false;
    for (TableRuntimeContext partition : activeRuntimeContexts.get(table)) {
      if (seen) {
        sb.append(",");
      }
      sb.append(partition.getPartitionSequence());
      if (partition.isMarkedNoMoreData()) {
        sb.append("*");
      }
      seen = true;
    }
    // NOTE(review): the list is closed with ": ]" — the leading ": " looks unintended (probably meant "]")
    sb.append(": ]");
  }
  return sb.toString();
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jbpm;

import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.kie.api.task.model.Task;

/**
 * Endpoint configuration for the camel-jbpm component: holds the connection
 * details and the per-operation parameters (process, task, work item, etc.)
 * used when talking to a jBPM server.
 */
@UriParams
public class JBPMConfiguration {

    @UriPath @Metadata(required = true)
    private URL connectionURL;
    @UriParam(label = "producer", defaultValue = "startProcess")
    private String operation;
    @UriParam @Metadata(required = true)
    private String deploymentId;
    @UriParam
    private Long processInstanceId;
    @UriParam
    private Object value;
    @UriParam
    private String processId;
    @UriParam
    private String eventType;
    @UriParam
    private Object event;
    @UriParam
    private Integer maxNumber;
    @UriParam
    private String identifier;
    @UriParam
    private Long workItemId;
    @UriParam
    private Long taskId;
    @UriParam
    private String userId;
    @UriParam
    private Integer page = 0;
    @UriParam
    private Integer pageSize = 10;
    @UriParam
    private String targetUserId;
    @UriParam
    private Long attachmentId;
    @UriParam
    private Long contentId;
    @UriParam
    private Task task;
    @UriParam(label = "advanced")
    private List<String> entities;
    @UriParam(label = "filter")
    private List<String> statuses;
    @UriParam(label = "security", secret = true)
    private String userName;
    @UriParam(label = "security", secret = true)
    private String password;
    @UriParam
    private Integer timeout;
    @UriParam(label = "advanced")
    private Map<String, Object> parameters;
    @UriParam(label = "advanced")
    private Class[] extraJaxbClasses;
    @UriParam
    private Boolean emitterSendItems;
    // NOTE(review): annotated @UriPath although connectionURL is already the path — confirm intended
    @UriPath
    private String eventListenerType;

    public String getOperation() { return operation; }

    /**
     * The operation to perform
     */
    public void setOperation(String operation) { this.operation = operation; }

    public Object getValue() { return value; }

    /**
     * the value to assign to the global identifier
     */
    public void setValue(Object value) { this.value = value; }

    public String getProcessId() { return processId; }

    /**
     * the id of the process that should be acted upon
     */
    public void setProcessId(String processId) { this.processId = processId; }

    public Map<String, Object> getParameters() { return parameters; }

    /**
     * the variables that should be set for various operations
     */
    public void setParameters(Map<String, Object> parameters) { this.parameters = parameters; }

    public Long getProcessInstanceId() { return processInstanceId; }

    /**
     * the id of the process instance
     */
    public void setProcessInstanceId(Long processInstanceId) { this.processInstanceId = processInstanceId; }

    public String getEventType() { return eventType; }

    /**
     * the type of event to use when signalEvent operation is performed
     */
    public void setEventType(String eventType) { this.eventType = eventType; }

    public Object getEvent() { return event; }

    /**
     * the data associated with this event when signalEvent operation is performed
     */
    public void setEvent(Object event) { this.event = event; }

    public Integer getMaxNumber() { return maxNumber; }

    /**
     * the maximum number of rules that should be fired
     */
    public void setMaxNumber(Integer maxNumber) { this.maxNumber = maxNumber; }

    public String getIdentifier() { return identifier; }

    /**
     * identifier the global identifier
     */
    public void setIdentifier(String identifier) { this.identifier = identifier; }

    public Long getWorkItemId() { return workItemId; }

    /**
     * the id of the work item
     */
    public void setWorkItemId(Long workItemId) { this.workItemId = workItemId; }

    public Long getTaskId() { return taskId; }

    /**
     * the id of the task
     */
    public void setTaskId(Long taskId) { this.taskId = taskId; }

    public String getUserId() { return userId; }

    /**
     * userId to use with task operations
     */
    public void setUserId(String userId) { this.userId = userId; }

    public Task getTask() { return task; }

    /**
     * The task instance to use with task operations
     */
    public void setTask(Task task) { this.task = task; }

    public Integer getPage() { return page; }

    /**
     * The page to use when retrieving user tasks
     */
    public void setPage(Integer page) { this.page = page; }

    public Integer getPageSize() { return pageSize; }

    /**
     * The page size to use when retrieving user tasks
     */
    public void setPageSize(Integer pageSize) { this.pageSize = pageSize; }

    public String getTargetUserId() { return targetUserId; }

    /**
     * The targetUserId used when delegating a task
     */
    public void setTargetUserId(String targetUserId) { this.targetUserId = targetUserId; }

    public Long getAttachmentId() { return attachmentId; }

    /**
     * attachmentId to use when retrieving attachments
     */
    public void setAttachmentId(Long attachmentId) { this.attachmentId = attachmentId; }

    public Long getContentId() { return contentId; }

    /**
     * contentId to use when retrieving attachments
     */
    public void setContentId(Long contentId) { this.contentId = contentId; }

    public List<String> getEntities() { return entities; }

    /**
     * The potentialOwners when nominateTask operation is performed
     */
    public void setEntities(List<String> entities) { this.entities = entities; }

    public List<String> getStatuses() { return statuses; }

    /**
     * The list of status to use when filtering tasks
     */
    public void setStatuses(List<String> statuses) { this.statuses = statuses; }

    public String getUserName() { return userName; }

    /**
     * Username for authentication
     */
    public void setUserName(String userName) { this.userName = userName; }

    public String getPassword() { return password; }

    /**
     * Password for authentication
     */
    public void setPassword(String password) { this.password = password; }

    public URL getConnectionURL() { return connectionURL; }

    /**
     * The URL to the jBPM server.
     */
    public void setConnectionURL(URL connectionURL) { this.connectionURL = connectionURL; }

    public String getDeploymentId() { return deploymentId; }

    /**
     * The id of the deployment
     */
    public void setDeploymentId(String deploymentId) { this.deploymentId = deploymentId; }

    public Integer getTimeout() { return timeout; }

    /**
     * A timeout value
     */
    public void setTimeout(Integer timeout) { this.timeout = timeout; }

    public Class[] getExtraJaxbClasses() { return extraJaxbClasses; }

    /**
     * To load additional classes when working with XML
     */
    public void setExtraJaxbClasses(Class[] extraJaxbClasses) { this.extraJaxbClasses = extraJaxbClasses; }

    public String getEventListenerType() { return eventListenerType; }

    /**
     * Sets the event listener type to attach to
     */
    public void setEventListenerType(String eventListenerType) { this.eventListenerType = eventListenerType; }

    public Boolean getEmitterSendItems() { return emitterSendItems; }

    /**
     * Sets if event produced by emitter should be sent as single items or complete collection
     */
    public void setEmitterSendItems(Boolean emitterSendItems) {
        // fixed parameter-name typo ("emiterSendItems")
        this.emitterSendItems = emitterSendItems;
    }

    @Override
    public String toString() {
        // password is declared secret = true, so it is masked here instead of printed in clear text;
        // emitterSendItems was previously missing from this representation
        return "JBPMConfiguration [connectionURL=" + connectionURL + ", operation=" + operation + ", deploymentId="
               + deploymentId + ", processInstanceId=" + processInstanceId + ", value=" + value + ", processId="
               + processId + ", eventType=" + eventType + ", event=" + event + ", maxNumber=" + maxNumber
               + ", identifier=" + identifier + ", workItemId=" + workItemId + ", taskId=" + taskId + ", userId="
               + userId + ", page=" + page + ", pageSize=" + pageSize + ", targetUserId=" + targetUserId
               + ", attachmentId=" + attachmentId + ", contentId=" + contentId + ", task=" + task + ", entities="
               + entities + ", statuses=" + statuses + ", userName=" + userName + ", password="
               + (password == null ? null : "******") + ", timeout=" + timeout + ", parameters=" + parameters
               + ", extraJaxbClasses=" + Arrays.toString(extraJaxbClasses) + ", emitterSendItems=" + emitterSendItems
               + ", eventListenerType=" + eventListenerType + "]";
    }
}
package org.jdbcquery;

import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Represents a mocked result set.
 *
 * @author Troy Histed
 */
public class MockResultSet implements ResultSet {

  // rows in insertion order; each row maps column label -> value
  private final List<LinkedHashMap<String, Object>> values = new ArrayList<LinkedHashMap<String, Object>>();
  // the row the cursor currently points at (null before the first next())
  private LinkedHashMap<String, Object> currentRow = null;
  // false once close() has been called
  private boolean open = true;
  // -1 = before first row, values.size() = after last row
  private int cursorIndex = -1;
  // whether the most recently read column value was SQL NULL
  private boolean wasNull = false;

  /**
   * --------------------------
   * Mocked methods
   * --------------------------
   */

  public void close() throws SQLException {
    this.open = false;
  }

  public boolean isClosed() throws SQLException {
    return !this.open;
  }

  public boolean isAfterLast() throws SQLException {
    return this.cursorIndex >= this.values.size();
  }

  public boolean next() throws SQLException {
    if (this.isAfterLast()) {
      // NOTE(review): JDBC's ResultSet.next() contract returns false repeatedly after the end;
      // this mock throws instead — presumably intentional strictness for tests, confirm.
      throw new SQLException("Cursor already after last");
    }
    this.cursorIndex += 1;
    if (this.isAfterLast()) {
      return false;
    }
    this.currentRow = this.values.get(this.cursorIndex);
    return true;
  }

  public boolean wasNull() throws SQLException {
    return this.wasNull;
  }

  public int getRow() throws SQLException {
    // JDBC rows are 1-based
    return this.cursorIndex + 1;
  }

  /**
   * --------------------------
   * Custom methods
   * --------------------------
   */

  /**
   * @return the values
   */
  public List<LinkedHashMap<String, Object>> getValues() {
    return this.values;
  }

  /**
   * --------------------------
   * Mocked getter methods
   * --------------------------
   */
public String getString(String columnLabel) throws SQLException { final String value = (String) this.currentRow.get(columnLabel); this.wasNull = value == null; return value; } public int getInt(String columnLabel) throws SQLException { final Integer value = (Integer) this.currentRow.get(columnLabel); this.wasNull = value == null; return value == null ? 0 : value.intValue(); } public long getLong(String columnLabel) throws SQLException { final Long value = (Long) this.currentRow.get(columnLabel); this.wasNull = value == null; return value == null ? 0 : value.longValue(); } /** * -------------------------- * Unimplemented methods * -------------------------- */ public String getString(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public int getInt(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public long getLong(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean getBoolean(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public byte getByte(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public short getShort(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public float getFloat(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public double getDouble(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { throw new IllegalStateException("This is a mock class"); } public byte[] getBytes(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Date getDate(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock 
class"); } public Time getTime(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Timestamp getTimestamp(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean getBoolean(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public byte getByte(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public short getShort(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public float getFloat(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public double getDouble(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { throw new IllegalStateException("This is a mock class"); } public byte[] getBytes(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Date getDate(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Time getTime(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Timestamp getTimestamp(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public <T> T unwrap(Class<T> iface) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean isWrapperFor(Class<?> iface) throws SQLException { throw new IllegalStateException("This is a mock class"); } public InputStream getAsciiStream(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public InputStream getUnicodeStream(int columnIndex) throws SQLException { throw new 
// NOTE(review): remainder of a mock java.sql.ResultSet implementation. Every
// method below unconditionally throws IllegalStateException("This is a mock
// class") — the mock exists only to satisfy the interface, never to be called.
// The class declaration and the first half of the methods are above this
// chunk; the fragment on the next line completes a `throw new` statement
// started on the previous (out-of-view) line, so the code is left
// byte-identical. TODO confirm against the full file before any reformat:
// several physical line breaks here fall inside string literals.
IllegalStateException("This is a mock class"); } public InputStream getBinaryStream(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public InputStream getAsciiStream(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public InputStream getUnicodeStream(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public InputStream getBinaryStream(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public SQLWarning getWarnings() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void clearWarnings() throws SQLException { throw new IllegalStateException("This is a mock class"); } public String getCursorName() throws SQLException { throw new IllegalStateException("This is a mock class"); } public ResultSetMetaData getMetaData() throws SQLException { throw new IllegalStateException("This is a mock class"); } public Object getObject(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Object getObject(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public int findColumn(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Reader getCharacterStream(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Reader getCharacterStream(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public BigDecimal getBigDecimal(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public BigDecimal getBigDecimal(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean isBeforeFirst() throws SQLException { throw 
new IllegalStateException("This is a mock class"); } public boolean isFirst() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean isLast() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void beforeFirst() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void afterLast() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean first() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean last() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean absolute(int row) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean relative(int rows) throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean previous() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void setFetchDirection(int direction) throws SQLException { throw new IllegalStateException("This is a mock class"); } public int getFetchDirection() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void setFetchSize(int rows) throws SQLException { throw new IllegalStateException("This is a mock class"); } public int getFetchSize() throws SQLException { throw new IllegalStateException("This is a mock class"); } public int getType() throws SQLException { throw new IllegalStateException("This is a mock class"); } public int getConcurrency() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean rowUpdated() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean rowInserted() throws SQLException { throw new IllegalStateException("This is a mock class"); } public boolean rowDeleted() throws SQLException { throw new IllegalStateException("This 
is a mock class"); } public void updateNull(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBoolean(int columnIndex, boolean x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateByte(int columnIndex, byte x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateShort(int columnIndex, short x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateInt(int columnIndex, int x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateLong(int columnIndex, long x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateFloat(int columnIndex, float x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateDouble(int columnIndex, double x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateString(int columnIndex, String x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBytes(int columnIndex, byte[] x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateDate(int columnIndex, Date x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateTime(int columnIndex, Time x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { throw new IllegalStateException("This is a 
mock class"); } public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateObject(int columnIndex, Object x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNull(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBoolean(String columnLabel, boolean x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateByte(String columnLabel, byte x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateShort(String columnLabel, short x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateInt(String columnLabel, int x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateLong(String columnLabel, long x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateFloat(String columnLabel, float x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateDouble(String columnLabel, double x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateString(String columnLabel, String x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBytes(String columnLabel, 
byte[] x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateDate(String columnLabel, Date x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateTime(String columnLabel, Time x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateObject(String columnLabel, Object x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void insertRow() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateRow() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void deleteRow() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void refreshRow() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void cancelRowUpdates() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void moveToInsertRow() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void moveToCurrentRow() throws SQLException { throw new 
IllegalStateException("This is a mock class"); } public Statement getStatement() throws SQLException { throw new IllegalStateException("This is a mock class"); } public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Ref getRef(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Blob getBlob(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Clob getClob(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Array getArray(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Ref getRef(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Blob getBlob(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Clob getClob(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Array getArray(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Date getDate(int columnIndex, Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Date getDate(String columnLabel, Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Time getTime(int columnIndex, Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Time getTime(String columnLabel, Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Timestamp getTimestamp(int columnIndex, 
Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { throw new IllegalStateException("This is a mock class"); } public URL getURL(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public URL getURL(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateRef(int columnIndex, Ref x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateRef(String columnLabel, Ref x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(int columnIndex, Blob x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(String columnLabel, Blob x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(int columnIndex, Clob x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(String columnLabel, Clob x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateArray(int columnIndex, Array x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateArray(String columnLabel, Array x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public RowId getRowId(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public RowId getRowId(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateRowId(int columnIndex, RowId x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateRowId(String columnLabel, RowId x) throws SQLException { throw new 
IllegalStateException("This is a mock class"); } public int getHoldability() throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNString(int columnIndex, String nString) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNString(String columnLabel, String nString) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNClob(int columnIndex, NClob nClob) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNClob(String columnLabel, NClob nClob) throws SQLException { throw new IllegalStateException("This is a mock class"); } public NClob getNClob(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public NClob getNClob(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public SQLXML getSQLXML(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public SQLXML getSQLXML(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { throw new IllegalStateException("This is a mock class"); } public String getNString(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public String getNString(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Reader getNCharacterStream(int columnIndex) throws SQLException { throw new IllegalStateException("This is a mock class"); } public Reader getNCharacterStream(String columnLabel) throws SQLException { throw new IllegalStateException("This is a mock 
class"); } public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { throw 
new IllegalStateException("This is a mock class"); } public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(int columnIndex, Reader reader) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateClob(String columnLabel, Reader reader) throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNClob(int columnIndex, Reader reader) 
throws SQLException { throw new IllegalStateException("This is a mock class"); } public void updateNClob(String columnLabel, Reader reader) throws SQLException { throw new IllegalStateException("This is a mock class"); } public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { throw new IllegalStateException("This is a mock class"); } public <T> T getObject(String columnLabel, Class<T> type) throws SQLException { throw new IllegalStateException("This is a mock class"); } }
// NOTE(review): complete ExoPlayer SsaStyle class — represents one parsed
// 'Style:' line from an SSA/ASS [V4+ Styles] section. Key pieces visible here:
//  - fromStyleLine: splits a 'Style:' line on ',' and pulls fields at the
//    indices recorded in Format; returns null (with a warning) on malformed
//    input rather than throwing.
//  - parseColor: accepts "&H"-prefixed hex or plain decimal AABBGGRR; the
//    alpha byte is XOR-flipped because SSA treats 0xFF as transparent, the
//    opposite of Android's ColorInt convention (stated in its javadoc).
//    Catching IllegalArgumentException also covers NumberFormatException
//    (a subclass) from Long.parseLong and the checkArgument range check.
//  - parseBooleanValue: SSA encodes true as 1 or -1 (both accepted here).
//  - Format.fromFormatLine: maps lower-cased 'Format:' keys to column indices;
//    returns null when no 'name' column exists.
//  - Overrides: extracts {\pos(x,y)}, {\move(...)} (end position used) and
//    {\an<digit>} overrides from dialogue text via the static precompiled
//    Patterns; invalid overrides are deliberately ignored, not propagated.
// Code left byte-identical: the physical line breaks in this chunk fall
// inside string literals/javadoc, so any reflow must be done on the full file.
/* * Copyright (C) 2019 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.google.android.exoplayer2.text.ssa; import static com.google.android.exoplayer2.text.ssa.SsaDecoder.STYLE_LINE_PREFIX; import static com.google.android.exoplayer2.util.Assertions.checkArgument; import static java.lang.annotation.ElementType.TYPE_USE; import static java.lang.annotation.RetentionPolicy.SOURCE; import android.graphics.Color; import android.graphics.PointF; import android.text.TextUtils; import androidx.annotation.ColorInt; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; import com.google.common.base.Ascii; import com.google.common.primitives.Ints; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; import java.util.regex.Matcher; import java.util.regex.Pattern; /** Represents a line from an SSA/ASS {@code [V4+ Styles]} section. */ /* package */ final class SsaStyle { private static final String TAG = "SsaStyle"; /** * The SSA/ASS alignments. 
* * <p>Allowed values: * * <ul> * <li>{@link #SSA_ALIGNMENT_UNKNOWN} * <li>{@link #SSA_ALIGNMENT_BOTTOM_LEFT} * <li>{@link #SSA_ALIGNMENT_BOTTOM_CENTER} * <li>{@link #SSA_ALIGNMENT_BOTTOM_RIGHT} * <li>{@link #SSA_ALIGNMENT_MIDDLE_LEFT} * <li>{@link #SSA_ALIGNMENT_MIDDLE_CENTER} * <li>{@link #SSA_ALIGNMENT_MIDDLE_RIGHT} * <li>{@link #SSA_ALIGNMENT_TOP_LEFT} * <li>{@link #SSA_ALIGNMENT_TOP_CENTER} * <li>{@link #SSA_ALIGNMENT_TOP_RIGHT} * </ul> */ @Target(TYPE_USE) @IntDef({ SSA_ALIGNMENT_UNKNOWN, SSA_ALIGNMENT_BOTTOM_LEFT, SSA_ALIGNMENT_BOTTOM_CENTER, SSA_ALIGNMENT_BOTTOM_RIGHT, SSA_ALIGNMENT_MIDDLE_LEFT, SSA_ALIGNMENT_MIDDLE_CENTER, SSA_ALIGNMENT_MIDDLE_RIGHT, SSA_ALIGNMENT_TOP_LEFT, SSA_ALIGNMENT_TOP_CENTER, SSA_ALIGNMENT_TOP_RIGHT, }) @Documented @Retention(SOURCE) public @interface SsaAlignment {} // The numbering follows the ASS (v4+) spec (i.e. the points on the number pad). public static final int SSA_ALIGNMENT_UNKNOWN = -1; public static final int SSA_ALIGNMENT_BOTTOM_LEFT = 1; public static final int SSA_ALIGNMENT_BOTTOM_CENTER = 2; public static final int SSA_ALIGNMENT_BOTTOM_RIGHT = 3; public static final int SSA_ALIGNMENT_MIDDLE_LEFT = 4; public static final int SSA_ALIGNMENT_MIDDLE_CENTER = 5; public static final int SSA_ALIGNMENT_MIDDLE_RIGHT = 6; public static final int SSA_ALIGNMENT_TOP_LEFT = 7; public static final int SSA_ALIGNMENT_TOP_CENTER = 8; public static final int SSA_ALIGNMENT_TOP_RIGHT = 9; public final String name; public final @SsaAlignment int alignment; @Nullable @ColorInt public final Integer primaryColor; public final float fontSize; public final boolean bold; public final boolean italic; public final boolean underline; public final boolean strikeout; private SsaStyle( String name, @SsaAlignment int alignment, @Nullable @ColorInt Integer primaryColor, float fontSize, boolean bold, boolean italic, boolean underline, boolean strikeout) { this.name = name; this.alignment = alignment; this.primaryColor = primaryColor; this.fontSize = 
fontSize; this.bold = bold; this.italic = italic; this.underline = underline; this.strikeout = strikeout; } @Nullable public static SsaStyle fromStyleLine(String styleLine, Format format) { checkArgument(styleLine.startsWith(STYLE_LINE_PREFIX)); String[] styleValues = TextUtils.split(styleLine.substring(STYLE_LINE_PREFIX.length()), ","); if (styleValues.length != format.length) { Log.w( TAG, Util.formatInvariant( "Skipping malformed 'Style:' line (expected %s values, found %s): '%s'", format.length, styleValues.length, styleLine)); return null; } try { return new SsaStyle( styleValues[format.nameIndex].trim(), format.alignmentIndex != C.INDEX_UNSET ? parseAlignment(styleValues[format.alignmentIndex].trim()) : SSA_ALIGNMENT_UNKNOWN, format.primaryColorIndex != C.INDEX_UNSET ? parseColor(styleValues[format.primaryColorIndex].trim()) : null, format.fontSizeIndex != C.INDEX_UNSET ? parseFontSize(styleValues[format.fontSizeIndex].trim()) : Cue.DIMEN_UNSET, format.boldIndex != C.INDEX_UNSET && parseBooleanValue(styleValues[format.boldIndex].trim()), format.italicIndex != C.INDEX_UNSET && parseBooleanValue(styleValues[format.italicIndex].trim()), format.underlineIndex != C.INDEX_UNSET && parseBooleanValue(styleValues[format.underlineIndex].trim()), format.strikeoutIndex != C.INDEX_UNSET && parseBooleanValue(styleValues[format.strikeoutIndex].trim())); } catch (RuntimeException e) { Log.w(TAG, "Skipping malformed 'Style:' line: '" + styleLine + "'", e); return null; } } private static @SsaAlignment int parseAlignment(String alignmentStr) { try { @SsaAlignment int alignment = Integer.parseInt(alignmentStr.trim()); if (isValidAlignment(alignment)) { return alignment; } } catch (NumberFormatException e) { // Swallow the exception and return UNKNOWN below. 
} Log.w(TAG, "Ignoring unknown alignment: " + alignmentStr); return SSA_ALIGNMENT_UNKNOWN; } private static boolean isValidAlignment(@SsaAlignment int alignment) { switch (alignment) { case SSA_ALIGNMENT_BOTTOM_CENTER: case SSA_ALIGNMENT_BOTTOM_LEFT: case SSA_ALIGNMENT_BOTTOM_RIGHT: case SSA_ALIGNMENT_MIDDLE_CENTER: case SSA_ALIGNMENT_MIDDLE_LEFT: case SSA_ALIGNMENT_MIDDLE_RIGHT: case SSA_ALIGNMENT_TOP_CENTER: case SSA_ALIGNMENT_TOP_LEFT: case SSA_ALIGNMENT_TOP_RIGHT: return true; case SSA_ALIGNMENT_UNKNOWN: default: return false; } } /** * Parses a SSA V4+ color expression. * * <p>A SSA V4+ color can be represented in hex {@code ("&HAABBGGRR")} or in 64-bit decimal format * (byte order AABBGGRR). In both cases the alpha channel's value needs to be inverted because in * SSA the 0xFF alpha value means transparent and 0x00 means opaque which is the opposite from the * Android {@link ColorInt} representation. * * @param ssaColorExpression A SSA V4+ color expression. * @return The parsed color value, or null if parsing failed. */ @Nullable @ColorInt public static Integer parseColor(String ssaColorExpression) { // We use a long because the value is an unsigned 32-bit number, so can be larger than // Integer.MAX_VALUE. long abgr; try { abgr = ssaColorExpression.startsWith("&H") // Parse color from hex format (&HAABBGGRR). ? Long.parseLong(ssaColorExpression.substring(2), /* radix= */ 16) // Parse color from decimal format (bytes order AABBGGRR). : Long.parseLong(ssaColorExpression); // Ensure only the bottom 4 bytes of abgr are set. checkArgument(abgr <= 0xFFFFFFFFL); } catch (IllegalArgumentException e) { Log.w(TAG, "Failed to parse color expression: '" + ssaColorExpression + "'", e); return null; } // Convert ABGR to ARGB. int a = Ints.checkedCast(((abgr >> 24) & 0xFF) ^ 0xFF); // Flip alpha. 
int b = Ints.checkedCast((abgr >> 16) & 0xFF); int g = Ints.checkedCast((abgr >> 8) & 0xFF); int r = Ints.checkedCast(abgr & 0xFF); return Color.argb(a, r, g, b); } private static float parseFontSize(String fontSize) { try { return Float.parseFloat(fontSize); } catch (NumberFormatException e) { Log.w(TAG, "Failed to parse font size: '" + fontSize + "'", e); return Cue.DIMEN_UNSET; } } private static boolean parseBooleanValue(String booleanValue) { try { int value = Integer.parseInt(booleanValue); return value == 1 || value == -1; } catch (NumberFormatException e) { Log.w(TAG, "Failed to parse boolean value: '" + booleanValue + "'", e); return false; } } /** * Represents a {@code Format:} line from the {@code [V4+ Styles]} section * * <p>The indices are used to determine the location of particular properties in each {@code * Style:} line. */ /* package */ static final class Format { public final int nameIndex; public final int alignmentIndex; public final int primaryColorIndex; public final int fontSizeIndex; public final int boldIndex; public final int italicIndex; public final int underlineIndex; public final int strikeoutIndex; public final int length; private Format( int nameIndex, int alignmentIndex, int primaryColorIndex, int fontSizeIndex, int boldIndex, int italicIndex, int underlineIndex, int strikeoutIndex, int length) { this.nameIndex = nameIndex; this.alignmentIndex = alignmentIndex; this.primaryColorIndex = primaryColorIndex; this.fontSizeIndex = fontSizeIndex; this.boldIndex = boldIndex; this.italicIndex = italicIndex; this.underlineIndex = underlineIndex; this.strikeoutIndex = strikeoutIndex; this.length = length; } /** * Parses the format info from a 'Format:' line in the [V4+ Styles] section. * * @return the parsed info, or null if {@code styleFormatLine} doesn't contain 'name'. 
*/ @Nullable public static Format fromFormatLine(String styleFormatLine) { int nameIndex = C.INDEX_UNSET; int alignmentIndex = C.INDEX_UNSET; int primaryColorIndex = C.INDEX_UNSET; int fontSizeIndex = C.INDEX_UNSET; int boldIndex = C.INDEX_UNSET; int italicIndex = C.INDEX_UNSET; int underlineIndex = C.INDEX_UNSET; int strikeoutIndex = C.INDEX_UNSET; String[] keys = TextUtils.split(styleFormatLine.substring(SsaDecoder.FORMAT_LINE_PREFIX.length()), ","); for (int i = 0; i < keys.length; i++) { switch (Ascii.toLowerCase(keys[i].trim())) { case "name": nameIndex = i; break; case "alignment": alignmentIndex = i; break; case "primarycolour": primaryColorIndex = i; break; case "fontsize": fontSizeIndex = i; break; case "bold": boldIndex = i; break; case "italic": italicIndex = i; break; case "underline": underlineIndex = i; break; case "strikeout": strikeoutIndex = i; break; } } return nameIndex != C.INDEX_UNSET ? new Format( nameIndex, alignmentIndex, primaryColorIndex, fontSizeIndex, boldIndex, italicIndex, underlineIndex, strikeoutIndex, keys.length) : null; } } /** * Represents the style override information parsed from an SSA/ASS dialogue line. * * <p>Overrides are contained in braces embedded in the dialogue text of the cue. */ /* package */ static final class Overrides { private static final String TAG = "SsaStyle.Overrides"; /** Matches "{foo}" and returns "foo" in group 1 */ // Warning that \\} can be replaced with } is bogus [internal: b/144480183]. 
private static final Pattern BRACES_PATTERN = Pattern.compile("\\{([^}]*)\\}"); private static final String PADDED_DECIMAL_PATTERN = "\\s*\\d+(?:\\.\\d+)?\\s*"; /** Matches "\pos(x,y)" and returns "x" in group 1 and "y" in group 2 */ private static final Pattern POSITION_PATTERN = Pattern.compile(Util.formatInvariant("\\\\pos\\((%1$s),(%1$s)\\)", PADDED_DECIMAL_PATTERN)); /** Matches "\move(x1,y1,x2,y2[,t1,t2])" and returns "x2" in group 1 and "y2" in group 2 */ private static final Pattern MOVE_PATTERN = Pattern.compile( Util.formatInvariant( "\\\\move\\(%1$s,%1$s,(%1$s),(%1$s)(?:,%1$s,%1$s)?\\)", PADDED_DECIMAL_PATTERN)); /** Matches "\anx" and returns x in group 1 */ private static final Pattern ALIGNMENT_OVERRIDE_PATTERN = Pattern.compile("\\\\an(\\d+)"); public final @SsaAlignment int alignment; @Nullable public final PointF position; private Overrides(@SsaAlignment int alignment, @Nullable PointF position) { this.alignment = alignment; this.position = position; } public static Overrides parseFromDialogue(String text) { @SsaAlignment int alignment = SSA_ALIGNMENT_UNKNOWN; PointF position = null; Matcher matcher = BRACES_PATTERN.matcher(text); while (matcher.find()) { String braceContents = Assertions.checkNotNull(matcher.group(1)); try { PointF parsedPosition = parsePosition(braceContents); if (parsedPosition != null) { position = parsedPosition; } } catch (RuntimeException e) { // Ignore invalid \pos() or \move() function. } try { @SsaAlignment int parsedAlignment = parseAlignmentOverride(braceContents); if (parsedAlignment != SSA_ALIGNMENT_UNKNOWN) { alignment = parsedAlignment; } } catch (RuntimeException e) { // Ignore invalid \an alignment override. } } return new Overrides(alignment, position); } public static String stripStyleOverrides(String dialogueLine) { return BRACES_PATTERN.matcher(dialogueLine).replaceAll(""); } /** * Parses the position from a style override, returns null if no position is found. 
* * <p>The attribute is expected to be in the form {@code \pos(x,y)} or {@code * \move(x1,y1,x2,y2,startTime,endTime)} (startTime and endTime are optional). In the case of * {@code \move()}, this returns {@code (x2, y2)} (i.e. the end position of the move). * * @param styleOverride The string to parse. * @return The parsed position, or null if no position is found. */ @Nullable private static PointF parsePosition(String styleOverride) { Matcher positionMatcher = POSITION_PATTERN.matcher(styleOverride); Matcher moveMatcher = MOVE_PATTERN.matcher(styleOverride); boolean hasPosition = positionMatcher.find(); boolean hasMove = moveMatcher.find(); String x; String y; if (hasPosition) { if (hasMove) { Log.i( TAG, "Override has both \\pos(x,y) and \\move(x1,y1,x2,y2); using \\pos values. override='" + styleOverride + "'"); } x = positionMatcher.group(1); y = positionMatcher.group(2); } else if (hasMove) { x = moveMatcher.group(1); y = moveMatcher.group(2); } else { return null; } return new PointF( Float.parseFloat(Assertions.checkNotNull(x).trim()), Float.parseFloat(Assertions.checkNotNull(y).trim())); } private static @SsaAlignment int parseAlignmentOverride(String braceContents) { Matcher matcher = ALIGNMENT_OVERRIDE_PATTERN.matcher(braceContents); return matcher.find() ? parseAlignment(Assertions.checkNotNull(matcher.group(1))) : SSA_ALIGNMENT_UNKNOWN; } } }
// NOTE(review): head of android.os.Trace (Java bindings for the kernel/atrace
// trace buffer). The class continues past the end of this chunk — the final
// javadoc for traceBegin is cut off — so the code is left byte-identical and
// only comments are added. Visible behavior: tag-enablement bits are cached
// from native code into the volatile sEnabledTags; TRACE_TAG_NOT_READY (bit
// 63) is the sentinel for "native property not read yet", and isTagEnabled
// lazily triggers cacheEnabledTags() on first use. The static initializer
// registers a SystemProperties change callback that refreshes the cache; the
// comment there documents the required native-first ordering.
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.os; import android.util.Log; /** * Writes trace events to the system trace buffer. These trace events can be * collected and visualized using the Systrace tool. * * This tracing mechanism is independent of the method tracing mechanism * offered by {@link Debug#startMethodTracing}. In particular, it enables * tracing of events that occur across multiple processes. */ public final class Trace { /* * Writes trace events to the kernel trace buffer. These trace events can be * collected using the "atrace" program for offline analysis. */ private static final String TAG = "Trace"; // These tags must be kept in sync with system/core/include/cutils/trace.h. 
/** @hide */ public static final long TRACE_TAG_NEVER = 0; /** @hide */ public static final long TRACE_TAG_ALWAYS = 1L << 0; /** @hide */ public static final long TRACE_TAG_GRAPHICS = 1L << 1; /** @hide */ public static final long TRACE_TAG_INPUT = 1L << 2; /** @hide */ public static final long TRACE_TAG_VIEW = 1L << 3; /** @hide */ public static final long TRACE_TAG_WEBVIEW = 1L << 4; /** @hide */ public static final long TRACE_TAG_WINDOW_MANAGER = 1L << 5; /** @hide */ public static final long TRACE_TAG_ACTIVITY_MANAGER = 1L << 6; /** @hide */ public static final long TRACE_TAG_SYNC_MANAGER = 1L << 7; /** @hide */ public static final long TRACE_TAG_AUDIO = 1L << 8; /** @hide */ public static final long TRACE_TAG_VIDEO = 1L << 9; /** @hide */ public static final long TRACE_TAG_CAMERA = 1L << 10; /** @hide */ public static final long TRACE_TAG_HAL = 1L << 11; /** @hide */ public static final long TRACE_TAG_APP = 1L << 12; /** @hide */ public static final long TRACE_TAG_RESOURCES = 1L << 13; /** @hide */ public static final long TRACE_TAG_DALVIK = 1L << 14; /** @hide */ public static final long TRACE_TAG_RS = 1L << 15; private static final long TRACE_TAG_NOT_READY = 1L << 63; private static final int MAX_SECTION_NAME_LEN = 127; // Must be volatile to avoid word tearing. 
// sEnabledTags caches the native-side tag bits; bit 63 (TRACE_TAG_NOT_READY)
// never appears in real tags, so it doubles as the "not yet initialized" marker.
private static volatile long sEnabledTags = TRACE_TAG_NOT_READY; private static native long nativeGetEnabledTags(); private static native void nativeTraceCounter(long tag, String name, int value); private static native void nativeTraceBegin(long tag, String name); private static native void nativeTraceEnd(long tag); private static native void nativeAsyncTraceBegin(long tag, String name, int cookie); private static native void nativeAsyncTraceEnd(long tag, String name, int cookie); private static native void nativeSetAppTracingAllowed(boolean allowed); private static native void nativeSetTracingEnabled(boolean allowed); static { // We configure two separate change callbacks, one in Trace.cpp and one here. The // native callback reads the tags from the system property, and this callback // reads the value that the native code retrieved. It's essential that the native // callback executes first. // // The system provides ordering through a priority level. Callbacks made through // SystemProperties.addChangeCallback currently have a negative priority, while // our native code is using a priority of zero. SystemProperties.addChangeCallback(new Runnable() { @Override public void run() { cacheEnabledTags(); } }); } private Trace() { } /** * Caches a copy of the enabled-tag bits. The "master" copy is held by the native code, * and comes from the PROPERTY_TRACE_TAG_ENABLEFLAGS property. * <p> * If the native code hasn't yet read the property, we will cause it to do one-time * initialization. We don't want to do this during class init, because this class is * preloaded, so all apps would be stuck with whatever the zygote saw. (The zygote * doesn't see the system-property update broadcasts.) * <p> * We want to defer initialization until the first use by an app, post-zygote. * <p> * We're okay if multiple threads call here simultaneously -- the native state is * synchronized, and sEnabledTags is volatile (prevents word tearing). 
*/ private static long cacheEnabledTags() { long tags = nativeGetEnabledTags(); sEnabledTags = tags; return tags; } /** * Returns true if a trace tag is enabled. * * @param traceTag The trace tag to check. * @return True if the trace tag is valid. * * @hide */ public static boolean isTagEnabled(long traceTag) { long tags = sEnabledTags; if (tags == TRACE_TAG_NOT_READY) { tags = cacheEnabledTags(); } return (tags & traceTag) != 0; } /** * Writes trace message to indicate the value of a given counter. * * @param traceTag The trace tag. * @param counterName The counter name to appear in the trace. * @param counterValue The counter value. * * @hide */ public static void traceCounter(long traceTag, String counterName, int counterValue) { if (isTagEnabled(traceTag)) { nativeTraceCounter(traceTag, counterName, counterValue); } } /** * Set whether application tracing is allowed for this process. This is intended to be set * once at application start-up time based on whether the application is debuggable. * * @hide */ public static void setAppTracingAllowed(boolean allowed) { nativeSetAppTracingAllowed(allowed); // Setting whether app tracing is allowed may change the tags, so we update the cached // tags here. cacheEnabledTags(); } /** * Set whether tracing is enabled in this process. Tracing is disabled shortly after Zygote * initializes and re-enabled after processes fork from Zygote. This is done because Zygote * has no way to be notified about changes to the tracing tags, and if Zygote ever reads and * caches the tracing tags, forked processes will inherit those stale tags. * * @hide */ public static void setTracingEnabled(boolean enabled) { nativeSetTracingEnabled(enabled); // Setting whether tracing is enabled may change the tags, so we update the cached tags // here. cacheEnabledTags(); } /** * Writes a trace message to indicate that a given section of code has * begun. Must be followed by a call to {@link #traceEnd} using the same * tag. 
* * @param traceTag The trace tag. * @param methodName The method name to appear in the trace. * * @hide */ public static void traceBegin(long traceTag, String methodName) { if (isTagEnabled(traceTag)) { nativeTraceBegin(traceTag, methodName); } } /** * Writes a trace message to indicate that the current method has ended. * Must be called exactly once for each call to {@link #traceBegin} using the same tag. * * @param traceTag The trace tag. * * @hide */ public static void traceEnd(long traceTag) { if (isTagEnabled(traceTag)) { nativeTraceEnd(traceTag); } } /** * Writes a trace message to indicate that a given section of code has * begun. Must be followed by a call to {@link #asyncTraceEnd} using the same * tag. Unlike {@link #traceBegin(long, String)} and {@link #traceEnd(long)}, * asynchronous events do not need to be nested. The name and cookie used to * begin an event must be used to end it. * * @param traceTag The trace tag. * @param methodName The method name to appear in the trace. * @param cookie Unique identifier for distinguishing simultaneous events * * @hide */ public static void asyncTraceBegin(long traceTag, String methodName, int cookie) { if (isTagEnabled(traceTag)) { nativeAsyncTraceBegin(traceTag, methodName, cookie); } } /** * Writes a trace message to indicate that the current method has ended. * Must be called exactly once for each call to {@link #asyncTraceBegin(long, String, int)} * using the same tag, name and cookie. * * @param traceTag The trace tag. * @param methodName The method name to appear in the trace. * @param cookie Unique identifier for distinguishing simultaneous events * * @hide */ public static void asyncTraceEnd(long traceTag, String methodName, int cookie) { if (isTagEnabled(traceTag)) { nativeAsyncTraceEnd(traceTag, methodName, cookie); } } /** * Writes a trace message to indicate that a given section of code has begun. This call must * be followed by a corresponding call to {@link #endSection()} on the same thread. 
* * <p class="note"> At this time the vertical bar character '|', newline character '\n', and * null character '\0' are used internally by the tracing mechanism. If sectionName contains * these characters they will be replaced with a space character in the trace. * * @param sectionName The name of the code section to appear in the trace. This may be at * most 127 Unicode code units long. */ public static void beginSection(String sectionName) { if (isTagEnabled(TRACE_TAG_APP)) { if (sectionName.length() > MAX_SECTION_NAME_LEN) { throw new IllegalArgumentException("sectionName is too long"); } nativeTraceBegin(TRACE_TAG_APP, sectionName); } } /** * Writes a trace message to indicate that a given section of code has ended. This call must * be preceeded by a corresponding call to {@link #beginSection(String)}. Calling this method * will mark the end of the most recently begun section of code, so care must be taken to * ensure that beginSection / endSection pairs are properly nested and called from the same * thread. */ public static void endSection() { if (isTagEnabled(TRACE_TAG_APP)) { nativeTraceEnd(TRACE_TAG_APP); } } }
package butterknife;

import com.google.common.base.Joiner;
import com.google.testing.compile.JavaFileObjects;
import org.junit.Test;

import javax.tools.JavaFileObject;

import butterknife.compiler.ButterKnifeProcessor;

import static com.google.common.truth.Truth.assertAbout;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;

/**
 * Compile-testing tests for the {@code @BindView} annotation: each test feeds an annotated
 * source file through {@link ButterKnifeProcessor} and asserts either the exact generated
 * {@code $$ViewBinder} source or the expected compile error.
 * <p>
 * NOTE(review): the whitespace inside the expected-source string literals must match the
 * processor's emitted code exactly; do not reformat these literals.
 */
public class BindViewTest {
  // Happy path: a plain View field produces a findRequiredView binding plus an unbinder.
  @Test public void bindingView() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) View thing;",
        "}"
    ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.thing = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // A final target class gets a final unbinder with no createUnbinder/unbind(T) hooks.
  @Test public void bindingViewFinalClass() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public final class Test extends Activity {",
        " @BindView(1) View thing;",
        "}"
    ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public final class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = new InnerUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected static final class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " target.thing = null;\n"
        + " target = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // An interface-typed field is bound through finder.castView rather than a raw assignment.
  @Test public void bindingInterface() throws Exception {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test",
        Joiner.on('\n').join(
            "package test;",
            "import android.app.Activity;",
            "import android.view.View;",
            "import butterknife.BindView;",
            "public class Test extends Activity {",
            " interface TestInterface {}",
            " @BindView(1) TestInterface thing;",
            "}"
        ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = finder.castView(view, 1, \"field 'thing'\");\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.thing = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // A type-variable field (T extends TextView) also goes through castView.
  @Test public void genericType() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.widget.EditText;",
        "import android.widget.TextView;",
        "import butterknife.BindView;",
        "class Test<T extends TextView> extends Activity {",
        " @BindView(1) T thing;",
        "}"
    ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = finder.castView(view, 1, \"field 'thing'\");\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.thing = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // A field binding and a listener binding on the same ID share a single find call.
  @Test public void oneFindPerId() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "import butterknife.OnClick;",
        "public class Test extends Activity {",
        " @BindView(1) View thing1;",
        " @OnClick(1) void doStuff() {}",
        "}"
    ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.DebouncingOnClickListener;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing1' and method 'doStuff'\");\n"
        + " target.thing1 = view;\n"
        + " unbinder.view1 = view;\n"
        + " view.setOnClickListener(new DebouncingOnClickListener() {\n"
        + " @Override\n"
        + " public void doClick(View p0) {\n"
        + " target.doStuff();\n"
        + " }\n"
        + " });\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " View view1;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " view1.setOnClickListener(null);\n"
        + " target.thing1 = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // public / package-private / protected fields are all acceptable (only private/static fail).
  @Test public void fieldVisibility() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) public View thing1;",
        " @BindView(2) View thing2;",
        " @BindView(3) protected View thing3;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError();
  }

  // Any annotation named "Nullable" makes the binding optional (findOptionalView, null message).
  @Test public void nullable() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @interface Nullable {}",
        " @Nullable @BindView(1) View view;",
        "}"
    ));

    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findOptionalView(source, 1, null);\n"
        + " target.view = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.view = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }

  // A bound subclass extends the parent's binder/unbinder; an unbound subclass gets no binder.
  @Test public void superclass() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) View view;",
        "}",
        "class TestOne extends Test {",
        " @BindView(1) View thing;",
        "}",
        "class TestTwo extends Test {",
        "}"
    ));

    JavaFileObject expectedSource1 = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'view'\");\n"
        + " target.view = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.view = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    JavaFileObject expectedSource2 = JavaFileObjects.forSourceString("test/TestOne$$ViewBinder", ""
        + "// Generated code from Butter Knife. Do not modify!\n"
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class TestOne$$ViewBinder<T extends TestOne> extends Test$$ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = (InnerUnbinder) super.bind(finder, target, source);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " @Override\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends TestOne> extends Test$$ViewBinder.InnerUnbinder<T> {\n"
        + " protected InnerUnbinder(T target) {\n"
        + " super(target);\n"
        + " }\n"
        + " @Override\n"
        + " protected void unbind(T target) {\n"
        + " super.unbind(target);\n"
        + " target.thing = null;\n"
        + " }\n"
        + " }\n"
        + "}\n");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource1, expectedSource2);
  }

  // Same as superclass() but the parent is generic; the generated binders are unaffected.
  @Test public void genericSuperclass() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test<T> extends Activity {",
        " @BindView(1) View view;",
        "}",
        "class TestOne extends Test<String> {",
        " @BindView(1) View thing;",
        "}",
        "class TestTwo extends Test<Object> {",
        "}"
    ));

    JavaFileObject expectedSource1 = JavaFileObjects.forSourceString("test/Test$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import butterknife.internal.ViewBinder;\n"
        + "import java.lang.IllegalStateException;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = createUnbinder(target);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'view'\");\n"
        + " target.view = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends Test> implements Unbinder {\n"
        + " private T target;\n"
        + " protected InnerUnbinder(T target) {\n"
        + " this.target = target;\n"
        + " }\n"
        + " @Override\n"
        + " public final void unbind() {\n"
        + " if (target == null) throw new IllegalStateException(\"Bindings already cleared.\");\n"
        + " unbind(target);\n"
        + " target = null;\n"
        + " }\n"
        + " protected void unbind(T target) {\n"
        + " target.view = null;\n"
        + " }\n"
        + " }\n"
        + "}");

    JavaFileObject expectedSource2 = JavaFileObjects.forSourceString("test/TestOne$$ViewBinder", ""
        + "package test;\n"
        + "import android.view.View;\n"
        + "import butterknife.Unbinder;\n"
        + "import butterknife.internal.Finder;\n"
        + "import java.lang.Object;\n"
        + "import java.lang.Override;\n"
        + "public class TestOne$$ViewBinder<T extends TestOne> extends Test$$ViewBinder<T> {\n"
        + " @Override\n"
        + " public Unbinder bind(final Finder finder, final T target, Object source) {\n"
        + " InnerUnbinder unbinder = (InnerUnbinder) super.bind(finder, target, source);\n"
        + " View view;\n"
        + " view = finder.findRequiredView(source, 1, \"field 'thing'\");\n"
        + " target.thing = view;\n"
        + " return unbinder;\n"
        + " }\n"
        + " @Override\n"
        + " protected InnerUnbinder<T> createUnbinder(T target) {\n"
        + " return new InnerUnbinder(target);\n"
        + " }\n"
        + " protected static class InnerUnbinder<T extends TestOne> extends Test$$ViewBinder.InnerUnbinder<T> {\n"
        + " protected InnerUnbinder(T target) {\n"
        + " super(target);\n"
        + " }\n"
        + " @Override\n"
        + " protected void unbind(T target) {\n"
        + " super.unbind(target);\n"
        + " target.thing = null;\n"
        + " }\n"
        + " }\n"
        + "}\n");

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource1, expectedSource2);
  }

  // Bindings are rejected inside the java.* framework package.
  @Test public void failsInJavaPackage() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package java.test;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test {",
        " @BindView(1) View thing;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            "@BindView-annotated class incorrectly in Java framework package. (java.test.Test)")
        .in(source).onLine(5);
  }

  // Bindings are rejected inside the android.* framework package.
  @Test public void failsInAndroidPackage() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package android.test;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test {",
        " @BindView(1) View thing;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            "@BindView-annotated class incorrectly in Android framework package. (android.test.Test)")
        .in(source).onLine(5);
  }

  // The enclosing class must not be private (the generated binder couldn't reach the field).
  @Test public void failsIfInPrivateClass() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test {",
        " private static class Inner {",
        " @BindView(1) View thing;",
        " }",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            "@BindView fields may not be contained in private classes. (test.Test.Inner.thing)")
        .in(source).onLine(5);
  }

  // The field's type must be a View subtype or an interface.
  @Test public void failsIfNotView() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) String thing;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining("@BindView fields must extend from View or be an interface. (test.Test.thing)")
        .in(source).onLine(5);
  }

  // Interface "fields" are implicitly static/final and cannot be bound.
  @Test public void failsIfInInterface() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public interface Test {",
        " @BindView(1) View thing = null;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            "@BindView fields may only be contained in classes. (test.Test.thing)")
        .in(source).onLine(4);
  }

  // Private fields cannot be assigned by the generated binder.
  @Test public void failsIfPrivate() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) private View thing;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining("@BindView fields must not be private or static. (test.Test.thing)")
        .in(source).onLine(6);
  }

  // Static fields are rejected for the same reason.
  @Test public void failsIfStatic() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) static View thing;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining("@BindView fields must not be private or static. (test.Test.thing)")
        .in(source).onLine(6);
  }

  // Binding two fields to the same ID in one class is an error.
  @Test public void duplicateBindingFails() throws Exception {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import butterknife.BindView;",
        "public class Test extends Activity {",
        " @BindView(1) View thing1;",
        " @BindView(1) View thing2;",
        "}"
    ));

    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            "Attempt to use @BindView for an already bound ID 1 on 'thing1'. (test.Test.thing2)")
        .in(source).onLine(7);
  }

  // ID-free @OnItemClick requires the target itself to be an AdapterView (or interface).
  @Test public void failsRootViewBindingWithBadTarget() throws Exception {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.content.Context;",
        "import android.view.View;",
        "import butterknife.OnItemClick;",
        "public class Test extends View {",
        " @OnItemClick void doStuff() {}",
        " public Test(Context context) {",
        " super(context);",
        " }",
        "}"));

    assertAbout(javaSource())
        .that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining((
            "@OnItemClick annotation without an ID may only be used with an object of type "
                + "\"android.widget.AdapterView<?>\" or an interface. (test.Test.doStuff)"))
        .in(source)
        .onLine(6);
  }

  // @Optional makes no sense on an ID-free binding (the root view always exists).
  @Test public void failsOptionalRootViewBinding() throws Exception {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.content.Context;",
        "import android.view.View;",
        "import butterknife.OnClick;",
        "import butterknife.Optional;",
        "public class Test extends View {",
        " @Optional @OnClick void doStuff() {}",
        " public Test(Context context) {",
        " super(context);",
        " }",
        "}"));

    assertAbout(javaSource())
        .that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(
            ("ID-free binding must not be annotated with @Optional. (test.Test.doStuff)"))
        .in(source)
        .onLine(7);
  }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.actions; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.lang.LanguageFormatting; import com.intellij.notification.Notification; import com.intellij.notification.NotificationType; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.SelectionModel; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressIndicatorProvider; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.util.ProgressWindow; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.roots.GeneratedSourcesFilter; import 
com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.ex.MessagesEx;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileFilter;
import com.intellij.psi.PsiBundle;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SequentialTask;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.diff.FilesTooBigForDiffException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;

/**
 * Base class for "layout code" style actions that process a single file, an array of files,
 * a directory, a module, or a whole project.
 * <p>
 * Processors can be chained: the {@code (previous, commandName, progressText)} constructor links
 * this processor to a previously configured one, and {@link #preprocessFile} composes the previous
 * processor's task with this one's so the chain runs in order, stopping when an earlier task
 * fails or is cancelled.
 * <p>
 * Subclasses implement {@link #prepareTask} to supply the actual per-file work; this class takes
 * care of file iteration, progress reporting, read/write action scheduling, and command execution.
 */
public abstract class AbstractLayoutCodeProcessor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.actions.AbstractLayoutCodeProcessor");

  protected final Project myProject;
  private final Module myModule;

  // Exactly one of the following "scope" fields is typically set, depending on which
  // constructor was used: a directory, a single file, or an explicit list of files.
  private PsiDirectory myDirectory;
  private PsiFile myFile;
  private List<PsiFile> myFiles;
  private boolean myIncludeSubdirs;

  private final String myProgressText;
  private final String myCommandName;
  private Runnable myPostRunnable;            // invoked after successful processing, if set
  private boolean myProcessChangedTextOnly;   // restrict processing to VCS-changed text

  // Head of the processor chain: tasks of the previous processor run before this one's.
  protected AbstractLayoutCodeProcessor myPreviousCodeProcessor;
  private List<VirtualFileFilter> myFilters = ContainerUtil.newArrayList();

  private LayoutCodeInfoCollector myInfoCollector;

  protected AbstractLayoutCodeProcessor(Project project, String commandName, String progressText, boolean processChangedTextOnly) {
    this(project, (Module)null, commandName, progressText, processChangedTextOnly);
  }

  /**
   * Chains this processor after {@code previous}: the processing scope (project, module,
   * directory, file(s)), filters and info collector are inherited from the previous processor.
   */
  protected AbstractLayoutCodeProcessor(@NotNull AbstractLayoutCodeProcessor previous,
                                        @NotNull String commandName,
                                        @NotNull String progressText) {
    myProject = previous.myProject;
    myModule = previous.myModule;
    myDirectory = previous.myDirectory;
    myFile = previous.myFile;
    myFiles = previous.myFiles;
    myIncludeSubdirs = previous.myIncludeSubdirs;
    myProcessChangedTextOnly = previous.myProcessChangedTextOnly;
    myPostRunnable = null;
    myProgressText = progressText;
    myCommandName = commandName;
    myPreviousCodeProcessor = previous;
    myFilters = previous.myFilters;
    myInfoCollector = previous.myInfoCollector;
  }

  protected AbstractLayoutCodeProcessor(Project project,
                                        @Nullable Module module,
                                        String commandName,
                                        String progressText,
                                        boolean processChangedTextOnly) {
    myProject = project;
    myModule = module;
    myDirectory = null;
    myIncludeSubdirs = true;
    myCommandName = commandName;
    myProgressText = progressText;
    myPostRunnable = null;
    myProcessChangedTextOnly = processChangedTextOnly;
  }

  protected AbstractLayoutCodeProcessor(Project project,
                                        PsiDirectory directory,
                                        boolean includeSubdirs,
                                        String progressText,
                                        String commandName,
                                        boolean processChangedTextOnly) {
    myProject = project;
    myModule = null;
    myDirectory = directory;
    myIncludeSubdirs = includeSubdirs;
    myProgressText = progressText;
    myCommandName = commandName;
    myPostRunnable = null;
    myProcessChangedTextOnly = processChangedTextOnly;
  }

  protected AbstractLayoutCodeProcessor(Project project,
                                        PsiFile file,
                                        String progressText,
                                        String commandName,
                                        boolean processChangedTextOnly) {
    myProject = project;
    myModule = null;
    myFile = file;
    myProgressText = progressText;
    myCommandName = commandName;
    myPostRunnable = null;
    myProcessChangedTextOnly = processChangedTextOnly;
  }

  protected AbstractLayoutCodeProcessor(Project project,
                                        PsiFile[] files,
                                        String progressText,
                                        String commandName,
                                        @Nullable Runnable postRunnable,
                                        boolean processChangedTextOnly) {
    myProject = project;
    myModule = null;
    // Files that cannot be formatted (no formatter, project/workspace files, generated
    // sources) are dropped up front.
    myFiles = filterFilesTo(files, new ArrayList<>());
    myProgressText = progressText;
    myCommandName = commandName;
    myPostRunnable = postRunnable;
    myProcessChangedTextOnly = processChangedTextOnly;
  }

  // Copies the formattable subset of {@code files} into {@code list} and returns it.
  private static List<PsiFile> filterFilesTo(PsiFile[] files, List<PsiFile> list) {
    for (PsiFile file : files) {
      if (canBeFormatted(file)) {
        list.add(file);
      }
    }
    return list;
  }

  public void setPostRunnable(Runnable postRunnable) {
    myPostRunnable = postRunnable;
  }

  // Returns the composed task of the previous processor in the chain, or null if this is the head.
  @Nullable
  private FutureTask<Boolean> getPreviousProcessorTask(@NotNull PsiFile file, boolean processChangedTextOnly) {
    return myPreviousCodeProcessor != null ? myPreviousCodeProcessor.preprocessFile(file, processChangedTextOnly)
                                           : null;
  }

  /**
   * Enables/disables info collection for the whole chain: the same collector instance is
   * propagated backwards through every previous processor.
   */
  public void setCollectInfo(boolean isCollectInfo) {
    myInfoCollector = isCollectInfo ? new LayoutCodeInfoCollector() : null;

    AbstractLayoutCodeProcessor current = this;
    while (current.myPreviousCodeProcessor != null) {
      current = current.myPreviousCodeProcessor;
      current.myInfoCollector = myInfoCollector;
    }
  }

  public void addFileFilter(@NotNull VirtualFileFilter filter) {
    myFilters.add(filter);
  }

  protected void setProcessChangedTextOnly(boolean value) {
    myProcessChangedTextOnly = value;
  }

  /**
   * Ensures that the given file is ready for reformatting and prepares it if necessary.
   *
   * @param file                   file to process
   * @param processChangedTextOnly whether only the changed text (in terms of VCS changes) should be processed
   * @return a task that triggers formatting of the given file; the task's return value indicates whether formatting
   *         finished correctly (as opposed to an exception occurring, the user cancelling formatting, etc.)
   * @throws IncorrectOperationException if an unexpected exception occurred during formatting
   */
  @NotNull
  protected abstract FutureTask<Boolean> prepareTask(@NotNull PsiFile file, boolean processChangedTextOnly)
    throws IncorrectOperationException;

  /**
   * Builds the combined task for {@code file}: first runs the previous processor's task (if any);
   * if it succeeded and was not cancelled, runs this processor's task inside a write action.
   * Checked causes wrapped in ExecutionException are rethrown unchecked where possible.
   */
  public FutureTask<Boolean> preprocessFile(@NotNull PsiFile file, boolean processChangedTextOnly)
    throws IncorrectOperationException {
    final FutureTask<Boolean> previousTask = getPreviousProcessorTask(file, processChangedTextOnly);
    final FutureTask<Boolean> currentTask = prepareTask(file, processChangedTextOnly);

    return new FutureTask<>(() -> {
      try {
        if (previousTask != null) {
          previousTask.run();
          if (!previousTask.get() || previousTask.isCancelled()) return false;
        }

        ApplicationManager.getApplication().runWriteAction(() -> currentTask.run());

        return currentTask.get() && !currentTask.isCancelled();
      }
      catch (ExecutionException e) {
        ExceptionUtil.rethrowUnchecked(e.getCause());
        throw e;
      }
    });
  }

  /**
   * Entry point: processes the single file if one was configured, otherwise iterates the
   * configured scope (explicit file list, or changed files / full tree depending on
   * {@code myProcessChangedTextOnly}).
   */
  public void run() {
    if (myFile != null) {
      runProcessFile(myFile);
      return;
    }

    FileTreeIterator iterator;
    if (myFiles != null) {
      iterator = new FileTreeIterator(myFiles);
    }
    else {
      iterator = myProcessChangedTextOnly ? buildChangedFilesIterator()
                                          : buildFileTreeIterator();
    }
    runProcessFiles(iterator);
  }

  // Widest-available scope wins: directory, explicit files, module, then whole project.
  private FileTreeIterator buildFileTreeIterator() {
    if (myDirectory != null) {
      return new FileTreeIterator(myDirectory);
    }
    else if (myFiles != null) {
      return new FileTreeIterator(myFiles);
    }
    else if (myModule != null) {
      return new FileTreeIterator(myModule);
    }
    else if (myProject != null) {
      return new FileTreeIterator(myProject);
    }
    return new FileTreeIterator(Collections.emptyList());
  }

  @NotNull
  private FileTreeIterator buildChangedFilesIterator() {
    List<PsiFile> files = getChangedFilesFromContext();
    return new FileTreeIterator(files);
  }

  @NotNull
  private List<PsiFile> getChangedFilesFromContext() {
    List<PsiDirectory> dirs = getAllSearchableDirsFromContext();
    return FormatChangedTextUtil.getChangedFilesFromDirs(myProject, dirs);
  }

  private List<PsiDirectory> getAllSearchableDirsFromContext() {
    List<PsiDirectory> dirs = ContainerUtil.newArrayList();
    if (myDirectory != null) {
      dirs.add(myDirectory);
    }
    else if (myModule != null) {
      List<PsiDirectory> allModuleDirs = FileTreeIterator.collectModuleDirectories(myModule);
      dirs.addAll(allModuleDirs);
    }
    else if (myProject != null) {
      List<PsiDirectory> allProjectDirs = FileTreeIterator.collectProjectDirectories(myProject);
      dirs.addAll(allProjectDirs);
    }
    return dirs;
  }

  /**
   * Processes a single file: acquires write access to its document (showing an error dialog if
   * the file is read-only), builds the combined task under a read action, then runs it under a
   * write action via {@link #runLayoutCodeProcess}.
   */
  private void runProcessFile(@NotNull final PsiFile file) {
    Document document = PsiDocumentManager.getInstance(myProject).getDocument(file);

    if (document == null) {
      return;
    }

    if (!FileDocumentManager.getInstance().requestWriting(document, myProject)) {
      Messages.showMessageDialog(myProject, PsiBundle.message("cannot.modify.a.read.only.file", file.getName()),
                                 CodeInsightBundle.message("error.dialog.readonly.file.title"),
                                 Messages.getErrorIcon()
      );
      return;
    }

    final Ref<FutureTask<Boolean>> writeActionRunnable = new Ref<>();
    // Prepared under a read action; the resulting task is handed to the write phase via the Ref.
    Runnable readAction = () -> {
      if (!checkFileWritable(file)) return;
      try {
        FutureTask<Boolean> writeTask = preprocessFile(file, myProcessChangedTextOnly);
        writeActionRunnable.set(writeTask);
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    };
    Runnable writeAction = () -> {
      if (writeActionRunnable.isNull()) return;
      FutureTask<Boolean> task = writeActionRunnable.get();
      task.run();
      try {
        task.get();  // surface failures; cancellation is intentionally ignored
      }
      catch (CancellationException ignored) {
      }
      catch (ExecutionException e) {
        // IndexNotReadyException is rethrown so the platform can handle "dumb mode" properly.
        if (e.getCause() instanceof IndexNotReadyException) {
          throw (IndexNotReadyException)e.getCause();
        }
        LOG.error(e);
      }
      catch (Exception e) {
        LOG.error(e);
      }
    };
    runLayoutCodeProcess(readAction, writeAction);
  }

  // Shows a deferred "file is read-only" message and returns false for non-writable files.
  private boolean checkFileWritable(final PsiFile file) {
    if (!file.isWritable()) {
      MessagesEx.fileIsReadOnly(myProject, file.getVirtualFile())
        .setTitle(CodeInsightBundle.message("error.dialog.readonly.file.title"))
        .showLater();
      return false;
    }
    else {
      return true;
    }
  }

  // Drives a ReformatFilesTask to completion under a modal progress dialog.
  private void runProcessFiles(@NotNull final FileTreeIterator fileIterator) {
    boolean isSuccess = ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> {
      ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
      ReformatFilesTask task = new ReformatFilesTask(fileIterator, indicator);
      while (!task.isDone()) {
        task.iteration();
      }
    }, myCommandName, true, myProject);

    if (isSuccess && myPostRunnable != null) {
      myPostRunnable.run();
    }
  }

  // A file is formattable when a formatter exists for it and it is neither a
  // project/workspace file nor a generated source.
  private static boolean canBeFormatted(PsiFile file) {
    if (LanguageFormatting.INSTANCE.forContext(file) == null) {
      return false;
    }
    VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile == null) return true;

    if (ProjectUtil.isProjectOrWorkspaceFile(virtualFile)) return false;

    return !GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(virtualFile, file.getProject());
  }

  /**
   * Runs {@code readAction} under a read action on a pooled thread (with a progress window),
   * then schedules {@code writeAction} as an undoable command on the EDT in the modality state
   * captured at the start. In unit-test mode everything runs synchronously on this thread.
   */
  private void runLayoutCodeProcess(final Runnable readAction, final Runnable writeAction) {
    final ProgressWindow progressWindow = new ProgressWindow(true, myProject);
    progressWindow.setTitle(myCommandName);
    progressWindow.setText(myProgressText);

    final ModalityState modalityState = ModalityState.current();

    final Runnable process = () -> ApplicationManager.getApplication().runReadAction(readAction);

    Runnable runnable = () -> {
      try {
        ProgressManager.getInstance().runProcess(process, progressWindow);
      }
      catch (ProcessCanceledException e) {
        return;
      }
      catch (IndexNotReadyException e) {
        LOG.warn(e);
        return;
      }

      final Runnable writeRunnable = () -> CommandProcessor.getInstance().executeCommand(myProject, () -> {
        try {
          writeAction.run();

          if (myPostRunnable != null) {
            ApplicationManager.getApplication().invokeLater(myPostRunnable);
          }
        }
        catch (IndexNotReadyException e) {
          LOG.warn(e);
        }
      }, myCommandName, null);

      if (ApplicationManager.getApplication().isUnitTestMode()) {
        writeRunnable.run();
      }
      else {
        ApplicationManager.getApplication().invokeLater(writeRunnable, modalityState, myProject.getDisposed());
      }
    };

    if (ApplicationManager.getApplication().isUnitTestMode()) {
      runnable.run();
    }
    else {
      ApplicationManager.getApplication().executeOnPooledThread(runnable);
    }
  }

  // Runs the combined task for the configured single file directly, without any progress UI.
  public void runWithoutProgress() throws IncorrectOperationException {
    final Runnable runnable = preprocessFile(myFile, myProcessChangedTextOnly);
    runnable.run();
  }

  // Returns the chain from its head to this processor, in execution order.
  private List<AbstractLayoutCodeProcessor> getAllProcessors() {
    AbstractLayoutCodeProcessor current = this;
    List<AbstractLayoutCodeProcessor> all = ContainerUtil.newArrayList();
    while (current != null) {
      all.add(current);
      current = current.myPreviousCodeProcessor;
    }
    Collections.reverse(all);
    return all;
  }

  /**
   * Sequential bulk task: first counts the files (using a copy of the iterator) to size the
   * progress fraction, then processes each file with every processor in the chain. Processing
   * stops as soon as any task fails or is cancelled.
   */
  private class ReformatFilesTask implements SequentialTask {
    private final List<AbstractLayoutCodeProcessor> myProcessors;

    private final FileTreeIterator myFileTreeIterator;
    private final FileTreeIterator myCountingIterator;  // copy used only for the counting phase

    private final ProgressIndicator myProgressIndicator;

    private int myTotalFiles = 0;
    private int myFilesProcessed = 0;
    private boolean myStopFormatting;
    private boolean myFilesCountingFinished;

    ReformatFilesTask(@NotNull FileTreeIterator fileIterator, @NotNull ProgressIndicator indicator) {
      myFileTreeIterator = fileIterator;
      myCountingIterator = new FileTreeIterator(fileIterator);
      myProcessors = getAllProcessors();
      myProgressIndicator = indicator;
    }

    @Override
    public void prepare() {
    }

    @Override
    public boolean isDone() {
      return myStopFormatting || !hasFilesToProcess(myFileTreeIterator);
    }

    // One step of the counting phase; flips myFilesCountingFinished when exhausted.
    private void countingIteration() {
      if (hasFilesToProcess(myCountingIterator)) {
        nextFile(myCountingIterator);
        myTotalFiles++;
      }
      else {
        myFilesCountingFinished = true;
      }
    }

    @Override
    public boolean iteration() {
      if (myStopFormatting) {
        return true;
      }

      if (!myFilesCountingFinished) {
        updateIndicatorText(ApplicationBundle.message("bulk.reformat.prepare.progress.text"), "");
        countingIteration();
        return true;
      }

      updateIndicatorFraction(myFilesProcessed);

      if (hasFilesToProcess(myFileTreeIterator)) {
        PsiFile file = nextFile(myFileTreeIterator);
        myFilesProcessed++;

        if (shouldProcessFile(file)) {
          updateIndicatorText(ApplicationBundle.message("bulk.reformat.process.progress.text"), getPresentablePath(file));
          // Alternative resolve lets processing proceed while indexing is incomplete.
          DumbService.getInstance(myProject).withAlternativeResolveEnabled(() -> performFileProcessing(file));
        }
      }

      return true;
    }

    @NotNull
    private PsiFile nextFile(FileTreeIterator it) {
      return ReadAction.compute(it::next);
    }

    private boolean hasFilesToProcess(FileTreeIterator it) {
      return it.hasNext();
    }

    private Boolean shouldProcessFile(PsiFile file) {
      Computable<Boolean> computable = () -> file.isWritable() && canBeFormatted(file) && acceptedByFilters(file);
      return ApplicationManager.getApplication().runReadAction(computable);
    }

    // Runs every chained processor over the file; each task is prepared under a read action
    // and executed as a write command on the EDT.
    private void performFileProcessing(@NotNull PsiFile file) {
      for (AbstractLayoutCodeProcessor processor : myProcessors) {
        FutureTask<Boolean> writeTask = ReadAction.compute(() -> processor.prepareTask(file, myProcessChangedTextOnly));

        ProgressIndicatorProvider.checkCanceled();

        ApplicationManager.getApplication().invokeAndWait(
          () -> WriteCommandAction.runWriteCommandAction(myProject, myCommandName, null, writeTask));

        checkStop(writeTask, file);
      }
    }

    // Stops the whole bulk run when a task failed or was cancelled; index-not-ready is only warned.
    private void checkStop(FutureTask<Boolean> task, PsiFile file) {
      try {
        if (!task.get() || task.isCancelled()) {
          myStopFormatting = true;
        }
      }
      catch (InterruptedException | ExecutionException e) {
        Throwable cause = e.getCause();
        if (cause instanceof IndexNotReadyException) {
          LOG.warn(cause);
          return;
        }
        LOG.error("Got unexpected exception during formatting " + file, e);
      }
    }

    private void updateIndicatorText(@NotNull String upperLabel, @NotNull String downLabel) {
      myProgressIndicator.setText(upperLabel);
      myProgressIndicator.setText2(downLabel);
    }

    private String getPresentablePath(@NotNull PsiFile file) {
      VirtualFile vFile = file.getVirtualFile();
      return vFile != null ? ProjectUtil.calcRelativeToProjectPath(vFile, myProject) : file.getName();
    }

    private void updateIndicatorFraction(int processed) {
      myProgressIndicator.setFraction((double)processed / myTotalFiles);
    }

    @Override
    public void stop() {
      myStopFormatting = true;
    }
  }

  // A file passes when it has a virtual file and every registered filter accepts it.
  private boolean acceptedByFilters(@NotNull PsiFile file) {
    VirtualFile vFile = file.getVirtualFile();
    if (vFile == null) {
      return false;
    }

    for (VirtualFileFilter filter : myFilters) {
      if (!filter.accept(file.getVirtualFile())) {
        return false;
      }
    }

    return true;
  }

  // Returns the current selection as a single-range list, or an empty list when there is none.
  protected static List<TextRange> getSelectedRanges(@NotNull SelectionModel selectionModel) {
    final List<TextRange> ranges = new SmartList<>();
    if (selectionModel.hasSelection()) {
      TextRange range = TextRange.create(selectionModel.getSelectionStart(), selectionModel.getSelectionEnd());
      ranges.add(range);
    }
    return ranges;
  }

  // Logs the failure and (outside unit tests) shows a notification that the file was too
  // big for computing changed ranges.
  protected void handleFileTooBigException(Logger logger, FilesTooBigForDiffException e, @NotNull PsiFile file) {
    logger.info("Error while calculating changed ranges for: " + file.getVirtualFile(), e);
    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      Notification notification = new Notification(ApplicationBundle.message("reformat.changed.text.file.too.big.notification.groupId"),
                                                   ApplicationBundle.message("reformat.changed.text.file.too.big.notification.title"),
                                                   ApplicationBundle.message("reformat.changed.text.file.too.big.notification.text", file.getName()),
                                                   NotificationType.INFORMATION);
      notification.notify(file.getProject());
    }
  }

  @Nullable
  public LayoutCodeInfoCollector getInfoCollector() {
    return myInfoCollector;
  }
}
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.context.properties.bind; import java.beans.PropertyEditorSupport; import java.io.File; import java.time.Duration; import java.util.Collections; import java.util.List; import java.util.function.Consumer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.beans.PropertyEditorRegistry; import org.springframework.core.ResolvableType; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.ConverterNotFoundException; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.Converter; import org.springframework.core.convert.support.GenericConversionService; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.verify; /** * Tests for {@link BindConverter}. 
*
 * @author Phillip Webb
 * @author Andy Wilkinson
 */
@ExtendWith(MockitoExtension.class)
class BindConverterTests {

  @Mock
  private Consumer<PropertyEditorRegistry> propertyEditorInitializer;

  @Test
  void createWhenPropertyEditorInitializerIsNullShouldCreate() {
    BindConverter.get(null, null);
  }

  @Test
  void createWhenPropertyEditorInitializerIsNotNullShouldUseToInitialize() {
    BindConverter.get(null, this.propertyEditorInitializer);
    verify(this.propertyEditorInitializer).accept(any(PropertyEditorRegistry.class));
  }

  // --- canConvert: property-editor paths ---

  @Test
  void canConvertWhenHasDefaultEditorShouldReturnTrue() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(null);
    assertThat(bindConverter.canConvert("java.lang.RuntimeException", ResolvableType.forClass(Class.class)))
        .isTrue();
  }

  @Test
  void canConvertWhenHasCustomEditorShouldReturnTrue() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    assertThat(bindConverter.canConvert("test", ResolvableType.forClass(SampleType.class))).isTrue();
  }

  @Test
  void canConvertWhenHasEditorByConventionShouldReturnTrue() {
    // ConventionType has a co-located ConventionTypeEditor discovered by naming convention.
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(null);
    assertThat(bindConverter.canConvert("test", ResolvableType.forClass(ConventionType.class))).isTrue();
  }

  @Test
  void canConvertWhenHasEditorForCollectionElementShouldReturnTrue() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    assertThat(bindConverter.canConvert("test", ResolvableType.forClassWithGenerics(List.class, SampleType.class)))
        .isTrue();
  }

  @Test
  void canConvertWhenHasEditorForArrayElementShouldReturnTrue() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    assertThat(bindConverter.canConvert("test", ResolvableType.forClass(SampleType[].class))).isTrue();
  }

  // --- canConvert: conversion-service paths ---

  @Test
  void canConvertWhenConversionServiceCanConvertShouldReturnTrue() {
    BindConverter bindConverter = getBindConverter(new SampleTypeConverter());
    assertThat(bindConverter.canConvert("test", ResolvableType.forClass(SampleType.class))).isTrue();
  }

  @Test
  void canConvertWhenNotPropertyEditorAndConversionServiceCannotConvertShouldReturnFalse() {
    BindConverter bindConverter = BindConverter.get(null, null);
    assertThat(bindConverter.canConvert("test", ResolvableType.forClass(SampleType.class))).isFalse();
  }

  // --- convert: property-editor paths ---

  @Test
  void convertWhenHasDefaultEditorShouldConvert() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(null);
    Class<?> converted = bindConverter.convert("java.lang.RuntimeException", ResolvableType.forClass(Class.class));
    assertThat(converted).isEqualTo(RuntimeException.class);
  }

  @Test
  void convertWhenHasCustomEditorShouldConvert() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    SampleType converted = bindConverter.convert("test", ResolvableType.forClass(SampleType.class));
    assertThat(converted.getText()).isEqualTo("test");
  }

  @Test
  void convertWhenHasEditorByConventionShouldConvert() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(null);
    ConventionType converted = bindConverter.convert("test", ResolvableType.forClass(ConventionType.class));
    assertThat(converted.getText()).isEqualTo("test");
  }

  @Test
  void convertWhenHasEditorForCollectionElementShouldConvert() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    List<SampleType> converted = bindConverter.convert("test",
        ResolvableType.forClassWithGenerics(List.class, SampleType.class));
    assertThat(converted).hasSize(1);
    assertThat(converted.get(0).getText()).isEqualTo("test");
  }

  @Test
  void convertWhenHasEditorForArrayElementShouldConvert() {
    BindConverter bindConverter = getPropertyEditorOnlyBindConverter(this::registerSampleTypeEditor);
    SampleType[] converted = bindConverter.convert("test", ResolvableType.forClass(SampleType[].class));
    assertThat(converted).isNotEmpty();
    assertThat(converted[0].getText()).isEqualTo("test");
  }

  // --- convert: conversion-service paths ---

  @Test
  void convertWhenConversionServiceCanConvertShouldConvert() {
    BindConverter bindConverter = getBindConverter(new SampleTypeConverter());
    SampleType converted = bindConverter.convert("test", ResolvableType.forClass(SampleType.class));
    assertThat(converted.getText()).isEqualTo("test");
  }

  @Test
  void convertWhenNotPropertyEditorAndConversionServiceCannotConvertShouldThrowException() {
    BindConverter bindConverter = BindConverter.get(null, null);
    assertThatExceptionOfType(ConverterNotFoundException.class)
        .isThrownBy(() -> bindConverter.convert("test", ResolvableType.forClass(SampleType.class)));
  }

  @Test
  void convertWhenConvertingToFileShouldExcludeFileEditor() {
    // For back compatibility we want true file conversion and not an accidental
    // classpath resource reference. See gh-12163
    BindConverter bindConverter = BindConverter.get(Collections.singletonList(new GenericConversionService()), null);
    File result = bindConverter.convert(".", ResolvableType.forClass(File.class));
    assertThat(result.getPath()).isEqualTo(".");
  }

  @Test
  void fallsBackToApplicationConversionService() {
    BindConverter bindConverter = BindConverter.get(Collections.singletonList(new GenericConversionService()), null);
    Duration result = bindConverter.convert("10s", ResolvableType.forClass(Duration.class));
    assertThat(result.getSeconds()).isEqualTo(10);
  }

  // Builds a converter whose conversion service always throws, so only property-editor
  // conversion can succeed.
  private BindConverter getPropertyEditorOnlyBindConverter(
      Consumer<PropertyEditorRegistry> propertyEditorInitializer) {
    return BindConverter.get(Collections.singletonList(new ThrowingConversionService()), propertyEditorInitializer);
  }

  private BindConverter getBindConverter(Converter<?, ?> converter) {
    GenericConversionService conversionService = new GenericConversionService();
    conversionService.addConverter(converter);
    return BindConverter.get(Collections.singletonList(conversionService), null);
  }

  private void registerSampleTypeEditor(PropertyEditorRegistry registry) {
    registry.registerCustomEditor(SampleType.class, new SampleTypePropertyEditor());
  }

  // --- Test fixtures ---

  static class SampleType {

    private String text;

    String getText() {
      return this.text;
    }

  }

  static class SampleTypePropertyEditor extends PropertyEditorSupport {

    @Override
    public void setAsText(String text) throws IllegalArgumentException {
      SampleType value = new SampleType();
      value.text = text;
      setValue(value);
    }

  }

  static class SampleTypeConverter implements Converter<String, SampleType> {

    @Override
    public SampleType convert(String source) {
      SampleType result = new SampleType();
      result.text = source;
      return result;
    }

  }

  static class ConventionType {

    private String text;

    String getText() {
      return this.text;
    }

  }

  static class ConventionTypeEditor extends PropertyEditorSupport {

    @Override
    public void setAsText(String text) throws IllegalArgumentException {
      ConventionType value = new ConventionType();
      value.text = text;
      setValue(value);
    }

  }

  /**
   * {@link ConversionService} that always throws an {@link AssertionError}.
   */
  static class ThrowingConversionService implements ConversionService {

    @Override
    public boolean canConvert(Class<?> sourceType, Class<?> targetType) {
      throw new AssertionError("Should not call conversion service");
    }

    @Override
    public boolean canConvert(TypeDescriptor sourceType, TypeDescriptor targetType) {
      throw new AssertionError("Should not call conversion service");
    }

    @Override
    public <T> T convert(Object source, Class<T> targetType) {
      throw new AssertionError("Should not call conversion service");
    }

    @Override
    public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
      throw new AssertionError("Should not call conversion service");
    }

  }

}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.resourcegroupstaggingapi.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

// NOTE(review): this class is @Generated by the AWS SDK code generator — do not hand-edit
// behavior; regenerate from the service model instead.
/**
 * <p>
 * A list of resource ARNs and the tags (keys and values) that are associated with each.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/resourcegroupstaggingapi-2017-01-26/ResourceTagMapping"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResourceTagMapping implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The ARN of the resource.
     * </p>
     */
    private String resourceARN;
    /**
     * <p>
     * The tags that have been applied to one or more AWS resources.
     * </p>
     */
    private java.util.List<Tag> tags;

    /**
     * <p>
     * The ARN of the resource.
     * </p>
     *
     * @param resourceARN
     *        The ARN of the resource.
     */
    public void setResourceARN(String resourceARN) {
        this.resourceARN = resourceARN;
    }

    /**
     * <p>
     * The ARN of the resource.
     * </p>
     *
     * @return The ARN of the resource.
     */
    public String getResourceARN() {
        return this.resourceARN;
    }

    /**
     * <p>
     * The ARN of the resource.
     * </p>
     *
     * @param resourceARN
     *        The ARN of the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResourceTagMapping withResourceARN(String resourceARN) {
        setResourceARN(resourceARN);
        return this;
    }

    /**
     * <p>
     * The tags that have been applied to one or more AWS resources.
     * </p>
     *
     * @return The tags that have been applied to one or more AWS resources.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * <p>
     * The tags that have been applied to one or more AWS resources.
     * </p>
     *
     * @param tags
     *        The tags that have been applied to one or more AWS resources.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }

        // Defensive copy: the stored list is decoupled from the caller's collection.
        this.tags = new java.util.ArrayList<Tag>(tags);
    }

    /**
     * <p>
     * The tags that have been applied to one or more AWS resources.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        The tags that have been applied to one or more AWS resources.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResourceTagMapping withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new java.util.ArrayList<Tag>(tags.length));
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The tags that have been applied to one or more AWS resources.
     * </p>
     *
     * @param tags
     *        The tags that have been applied to one or more AWS resources.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResourceTagMapping withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceARN() != null)
            sb.append("ResourceARN: ").append(getResourceARN()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ResourceTagMapping == false)
            return false;
        ResourceTagMapping other = (ResourceTagMapping) obj;
        // XOR checks: fields are unequal when exactly one side is null.
        if (other.getResourceARN() == null ^ this.getResourceARN() == null)
            return false;
        if (other.getResourceARN() != null && other.getResourceARN().equals(this.getResourceARN()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getResourceARN() == null) ? 0 : getResourceARN().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public ResourceTagMapping clone() {
        try {
            return (ResourceTagMapping) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.resourcegroupstaggingapi.model.transform.ResourceTagMappingMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package ekraft.verysimplerest; import com.sun.net.httpserver.HttpServer; import ekraft.verysimplerest.annotation.AnnotationHttpHandler; import ekraft.verysimplerest.lambda.HttpRequest; import ekraft.verysimplerest.lambda.HttpRequestHandler; import ekraft.verysimplerest.lambda.LambdaHttpHandler; import ekraft.verysimplerest.utils.RestException; import java.io.IOException; import java.net.InetSocketAddress; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import static ekraft.verysimplerest.utils.RestConstants.DELETE; import static ekraft.verysimplerest.utils.RestConstants.GET; import static ekraft.verysimplerest.utils.RestConstants.POST; import static ekraft.verysimplerest.utils.RestConstants.PUT; public class RestServer { private static final int DEFAULT_TIMEOUT = 5; private static final int DEFAULT_THREAD_POOL_SIZE = 100; private static final int DEFAULT_BACKLOG_SIZE = 100; private boolean debug = false; private Map<Object, RestHttpHandler> services = new LinkedHashMap<>(); private LambdaHttpHandler lambdaHttpHandler = new LambdaHttpHandler(this); private HttpServer server = null; private InetSocketAddress inetSocketAddress; private ScheduledExecutorService executor; private int timeout; public RestServer(int port) { inetSocketAddress = new InetSocketAddress("localhost", port); executor = Executors.newScheduledThreadPool(DEFAULT_THREAD_POOL_SIZE); this.timeout = DEFAULT_TIMEOUT; } public synchronized void addService(Object service) { if (services.containsKey(service)) { return; } RestHttpHandler handler = getHttpHandler(service); if (handler == null) { throw new IllegalArgumentException("Unhandled Service"); } startServer(); server.createContext(handler.getPath(), handler); services.put(service, handler); } public synchronized void shutdownService(Object service) { if 
(!services.containsKey(service)) { return; } server.removeContext(services.get(service).getPath()); services.remove(service); if (services.isEmpty()) { stopServer(); } } public void get(String path, Runnable runnable) { get(path, (HttpRequest<Boolean> request) -> run(request, runnable)); } public <T> void get(String path, Supplier<T> supplier) { get(path, (HttpRequest<T> request) -> supplier(request, supplier)); } public <T> void get(String path, HttpRequestHandler<T> httpRequestHandler) { addLambda(GET, path, httpRequestHandler); } public void put(String path, Runnable runnable) { put(path, (HttpRequest<Boolean> request) -> run(request, runnable)); } public <T> void put(String path, Supplier<T> supplier) { put(path, (HttpRequest<T> request) -> supplier(request, supplier)); } public <T> void put(String path, HttpRequestHandler<T> httpRequestHandler) { addLambda(PUT, path, httpRequestHandler); } public void post(String path, Runnable runnable) { post(path, (HttpRequest<Boolean> request) -> run(request, runnable)); } public <T> void post(String path, Supplier<T> supplier) { post(path, (HttpRequest<T> request) -> supplier(request, supplier)); } public <T> void post(String path, HttpRequestHandler<T> httpRequestHandler) { addLambda(POST, path, httpRequestHandler); } public void delete(String path, Runnable runnable) { delete(path, (HttpRequest<Boolean> request) -> run(request, runnable)); } public <T> void delete(String path, Supplier<T> supplier) { delete(path, (HttpRequest<T> request) -> supplier(request, supplier)); } public <T> void delete(String path, HttpRequestHandler<T> httpRequestHandler) { addLambda(DELETE, path, httpRequestHandler); } public void all(String path, Runnable runnable) { all(path, (HttpRequest<Boolean> request) -> run(request, runnable)); } public <T> void all(String path, Supplier<T> supplier) { all(path, (HttpRequest<T> request) -> supplier(request, supplier)); } public <T> void all(String path, HttpRequestHandler<T> httpRequestHandler) { 
addLambda(GET, path, httpRequestHandler); addLambda(PUT, path, httpRequestHandler); addLambda(POST, path, httpRequestHandler); addLambda(DELETE, path, httpRequestHandler); } public synchronized void removePath(String path) { new HashSet<>(services.values()).forEach(httpHandler -> { httpHandler.removeHandlers(path); if (httpHandler.isEmpty()) { shutdownService(httpHandler); } }); } public synchronized void shutdown() { new HashSet<>(services.keySet()).forEach(this::shutdownService); lambdaHttpHandler.removeAll(); } private synchronized <T> void addLambda(String method, String path, HttpRequestHandler<T> httpRequestHandler) { if (lambdaHttpHandler.isEmpty()) { addService(lambdaHttpHandler); } lambdaHttpHandler.addHandler(method, path, httpRequestHandler); } private <T> void supplier(HttpRequest<T> request, Supplier<T> supplier) { request.respond(supplier.get()); } private void run(HttpRequest<Boolean> request, Runnable runnable) { runnable.run(); request.respond(Boolean.TRUE); } private RestHttpHandler getHttpHandler(Object service) { if (service == null) { return null; } if (service instanceof RestHttpHandler) { return (RestHttpHandler) service; } return new AnnotationHttpHandler(this, service); } public synchronized String getDebugResponse() { String response = ""; for (RestHttpHandler httpHandler : services.values()) { response += httpHandler.getDebugResponse(); } return response; } private void startServer() { if (server != null) { return; } try { server = HttpServer.create(inetSocketAddress, DEFAULT_BACKLOG_SIZE); server.setExecutor(executor); server.start(); } catch (IOException e) { throw new RestException(e.getMessage(), e); } } private void stopServer() { server.stop(0); server = null; executor.shutdownNow(); } protected void timeout(Runnable runnable) { timeout(runnable, timeout, TimeUnit.SECONDS); } protected void timeout(Runnable runnable, int timeout, TimeUnit timeUnit) { executor.schedule(runnable, timeout, timeUnit); } public void setDebug(boolean 
debug) { this.debug = debug; } public boolean getDebug() { return debug; } }
/* * $Id$ * Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.util; import java.util.*; /** * Create a set of Object from a call list. */ public class SetUtil { /** * Don't construct. */ private SetUtil() { } /** * Create set from arg list. */ public static Set set() { return new HashSet(); } /** * Create a list from any number of arguments. */ public static Set set(Object... elements) { Set l = new HashSet(); if (elements == null) { return l; } for (Object arg : elements) { l.add(arg); } return l; } /** * Create set from arg list. */ public static Set set(Object object1) { Set l = new HashSet(); l.add(object1); return l; } /** * Create set from arg list. 
*/ public static Set set(Object object1, Object object2) { Set l = set(object1); l.add(object2); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3) { Set l = set(object1, object2); l.add(object3); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4) { Set l = set(object1, object2, object3); l.add(object4); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5) { Set l = set(object1, object2, object3, object4); l.add(object5); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5, Object object6) { Set l = set(object1, object2, object3, object4, object5); l.add(object6); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5, Object object6, Object object7) { Set l = set(object1, object2, object3, object4, object5, object6); l.add(object7); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5, Object object6, Object object7, Object object8) { Set l = set(object1, object2, object3, object4, object5, object6, object7); l.add(object8); return l; } /** * Create set from arg list. */ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5, Object object6, Object object7, Object object8, Object object9) { Set l = set(object1, object2, object3, object4, object5, object6, object7, object8); l.add(object9); return l; } /** * Create set from arg list. 
*/ public static Set set(Object object1, Object object2, Object object3, Object object4, Object object5, Object object6, Object object7, Object object8, Object object9, Object object10) { Set l = set(object1, object2, object3, object4, object5, object6, object7, object8, object9); l.add(object10); return l; } /** Check that all elements of the set are of the specified type, and * return an unmodifiable copy of the set. * @param set the set. * @param type the class with which all items of the set * must be assignment-compatible. * @throws NullPointerException if the set is null or if any element * is null * @throws ClassCastException if an item is not of the proper type */ public static Set immutableSetOfType(Set set, Class type) { return immutableSetOfType(set, type, false); } /** Check that all elements of the set are either of the specified type * or null, and * return an unmodifiable copy of the set. * @param set the set. * @param type the class with which all items of the set * must be assignment-compatible. 
* @throws NullPointerException if the set is null * @throws ClassCastException if an item is not of the proper type */ public static Set immutableSetOfTypeOrNull(Set set, Class type) { return immutableSetOfType(set, type, true); } private static Set immutableSetOfType(Set set, Class type, boolean nullOk) { Set s = new HashSet(set.size()); for (Iterator iter = set.iterator(); iter.hasNext(); ) { Object item = iter.next(); if (item == null) { if (!nullOk) { throw new NullPointerException("item of list is null"); } } else if (!type.isInstance(item)) { throw new ClassCastException("item <" + item + "> of list is not an instance of " + type); } s.add(item); } return Collections.unmodifiableSet(s); } /** Create a set containing the elements of an array */ public static Set fromArray(Object array[]) { Set l = set(); for (int i = 0; i < array.length; i++) { l.add(array[i]); } return l; } /** Create a set containing the elements of a Collection */ public static Set theSet(Collection coll) { return new HashSet(coll); } /** Create a set containing the elements of a list */ public static Set fromList(List list) { return theSet(list); } /** Create a set containing the elements of an iterator */ public static Set fromIterator(Iterator iterator) { Set l = set(); while (iterator.hasNext()) { l.add(iterator.next()); } return l; } /** Create a set containing the elements of a comma separated string */ public static Set fromCSV(String csv) { Set res = set(); StringTokenizer st = new StringTokenizer(csv, ","); while (st.hasMoreTokens()) { String id = (String)st.nextToken(); res.add(id); } return res; } }
/*
 * FILE: PointRDD
 * Copyright (c) 2015 - 2019 GeoSpark Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.datasyslab.geospark.spatialRDD;

import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Point;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.storage.StorageLevel;
import org.datasyslab.geospark.enums.FileDataSplitter;
import org.datasyslab.geospark.formatMapper.FormatMapper;
import org.datasyslab.geospark.formatMapper.PointFormatMapper;

/**
 * A {@link SpatialRDD} of JTS {@link Point} geometries.
 *
 * <p>The many constructor overloads are convenience variants of one master
 * constructor (the one taking sparkContext, InputLocation, Offset, splitter,
 * carryInputData, partitions, newLevel, sourceEpsgCRSCode, targetEpsgCode);
 * nullable parameters are simply passed as {@code null} by the shorter forms.</p>
 */
public class PointRDD
        extends SpatialRDD<Point>
{

    /** Creates an empty PointRDD; the raw RDD must be supplied separately. */
    public PointRDD() {}

    /**
     * Wraps an existing RDD of points without analyzing or transforming it.
     *
     * @param rawSpatialRDD the raw spatial RDD
     */
    public PointRDD(JavaRDD<Point> rawSpatialRDD)
    {
        this.rawSpatialRDD = rawSpatialRDD;
    }

    /**
     * Wraps an existing RDD of points and reprojects it between the two CRS codes.
     *
     * @param rawSpatialRDD the raw spatial RDD
     * @param sourceEpsgCRSCode the source epsg CRS code
     * @param targetEpsgCode the target epsg code
     */
    public PointRDD(JavaRDD<Point> rawSpatialRDD, String sourceEpsgCRSCode, String targetEpsgCode)
    {
        this.rawSpatialRDD = rawSpatialRDD;
        this.CRSTransform(sourceEpsgCRSCode, targetEpsgCode);
    }

    /** Loads points from a text file with an explicit column offset and partition count. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, Integer partitions)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, partitions, null, null, null);
    }

    /** Loads points from a text file with an explicit column offset, default partitioning. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, null, null, null, null);
    }

    /** Loads points from a text file, no column offset, explicit partition count. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, Integer partitions)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, partitions, null, null, null);
    }

    /** Loads points from a text file, no column offset, default partitioning. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, null, null, null, null);
    }

    /**
     * Loads points from a text file via a user-supplied line-to-point mapper.
     *
     * @param userSuppliedMapper maps partitions of input lines to Points
     */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer partitions, FlatMapFunction userSuppliedMapper)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation, partitions).mapPartitions(userSuppliedMapper));
    }

    /** Same as above with the default number of partitions. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FlatMapFunction userSuppliedMapper)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation).mapPartitions(userSuppliedMapper));
    }

    /**
     * Wraps an existing RDD with precomputed statistics (skips the analyze step).
     *
     * @param datasetBoundary the known bounding envelope of the data
     * @param approximateTotalCount the known approximate record count
     */
    public PointRDD(JavaRDD<Point> rawSpatialRDD, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this.rawSpatialRDD = rawSpatialRDD;
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** Wraps, reprojects, and attaches precomputed statistics. */
    public PointRDD(JavaRDD<Point> rawSpatialRDD, String sourceEpsgCRSCode, String targetEpsgCode, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this.rawSpatialRDD = rawSpatialRDD;
        this.CRSTransform(sourceEpsgCRSCode, targetEpsgCode);
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** File load with offset and partitions, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, Integer partitions, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, partitions, null, null, null);
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** File load with offset, default partitioning, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, null, null, null, null);
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** File load without offset, explicit partitions, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, Integer partitions, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, partitions, null, null, null);
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** File load without offset, default partitioning, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, null, null, null, null);
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** User-mapper load with explicit partitions, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer partitions, FlatMapFunction userSuppliedMapper, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation, partitions).mapPartitions(userSuppliedMapper));
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /** User-mapper load with default partitioning, plus precomputed statistics. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FlatMapFunction userSuppliedMapper, Envelope datasetBoundary, Integer approximateTotalCount)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation).mapPartitions(userSuppliedMapper));
        this.boundaryEnvelope = datasetBoundary;
        this.approximateTotalCount = approximateTotalCount;
    }

    /**
     * Wraps an existing RDD and analyzes it at the given storage level.
     *
     * @param newLevel the storage level used during {@code analyze}
     */
    public PointRDD(JavaRDD<Point> rawSpatialRDD, StorageLevel newLevel)
    {
        this.rawSpatialRDD = rawSpatialRDD;
        this.analyze(newLevel);
    }

    /** File load with offset and partitions, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, Integer partitions, StorageLevel newLevel)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, partitions, newLevel, null, null);
    }

    /** File load with offset, default partitioning, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, StorageLevel newLevel)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, null, newLevel, null, null);
    }

    /** File load without offset, explicit partitions, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, Integer partitions, StorageLevel newLevel)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, partitions, newLevel, null, null);
    }

    /** File load without offset, default partitioning, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, StorageLevel newLevel)
    {
        this(sparkContext, InputLocation, null, splitter, carryInputData, null, newLevel, null, null);
    }

    /** User-mapper load with explicit partitions, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer partitions, FlatMapFunction userSuppliedMapper, StorageLevel newLevel)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation, partitions).mapPartitions(userSuppliedMapper));
        this.analyze(newLevel);
    }

    /** User-mapper load with default partitioning, analyzed at the given storage level. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, FlatMapFunction userSuppliedMapper, StorageLevel newLevel)
    {
        this.setRawSpatialRDD(sparkContext.textFile(InputLocation).mapPartitions(userSuppliedMapper));
        this.analyze(newLevel);
    }

    /** Wraps an existing RDD, reprojects it, and analyzes at the given storage level. */
    public PointRDD(JavaRDD<Point> rawSpatialRDD, StorageLevel newLevel, String sourceEpsgCRSCode, String targetEpsgCode)
    {
        this.rawSpatialRDD = rawSpatialRDD;
        this.CRSTransform(sourceEpsgCRSCode, targetEpsgCode);
        this.analyze(newLevel);
    }

    /**
     * Master constructor: all other file-based overloads delegate here.
     *
     * @param sparkContext the spark context
     * @param InputLocation path of the input text file
     * @param Offset column offset of the geometry within each record; null means "use the splitter's default"
     * @param splitter the input format (CSV/TSV/WKT/GeoJSON...); must be non-null here
     * @param carryInputData whether non-geometry columns are carried along
     * @param partitions number of partitions, or null for the default
     * @param newLevel storage level for analyze, or null to skip analysis
     * @param sourceEpsgCRSCode source CRS, or null to skip reprojection
     * @param targetEpsgCode target CRS, or null to skip reprojection
     */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, Integer partitions, StorageLevel newLevel,
            String sourceEpsgCRSCode, String targetEpsgCode)
    {
        JavaRDD rawTextRDD = partitions != null ? sparkContext.textFile(InputLocation, partitions) : sparkContext.textFile(InputLocation);
        if (Offset != null) {
            this.setRawSpatialRDD(rawTextRDD.mapPartitions(new PointFormatMapper(Offset, splitter, carryInputData)));
        }
        else {
            this.setRawSpatialRDD(rawTextRDD.mapPartitions(new PointFormatMapper(splitter, carryInputData)));
        }
        if (sourceEpsgCRSCode != null && targetEpsgCode != null) { this.CRSTransform(sourceEpsgCRSCode, targetEpsgCode);}
        if (newLevel != null) { this.analyze(newLevel);}
        // NOTE(review): splitter is dereferenced unconditionally here, so every
        // caller reaching this constructor must pass a non-null splitter.
        // For GeoJSON input the property names are read from the first record.
        if (splitter.equals(FileDataSplitter.GEOJSON)) {
            this.fieldNames = FormatMapper.readGeoJsonPropertyNames(rawTextRDD.take(1).get(0).toString());
        }
    }

    /** Master-constructor variant without an explicit partition count. */
    public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer Offset, FileDataSplitter splitter, boolean carryInputData, StorageLevel newLevel,
            String sourceEpsgCRSCode, String targetEpsgCode)
    {
        this(sparkContext, InputLocation, Offset, splitter, carryInputData, null, newLevel, sourceEpsgCRSCode, targetEpsgCode);
    }

    /**
     * Instantiates a new point RDD.
* * @param sparkContext the spark context * @param InputLocation the input location * @param splitter the splitter * @param carryInputData the carry input data * @param partitions the partitions * @param newLevel the new level * @param sourceEpsgCRSCode the source epsg CRS code * @param targetEpsgCode the target epsg code */ public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, Integer partitions, StorageLevel newLevel, String sourceEpsgCRSCode, String targetEpsgCode) { this(sparkContext, InputLocation, null, splitter, carryInputData, partitions, newLevel, sourceEpsgCRSCode, targetEpsgCode); } /** * Instantiates a new point RDD. * * @param sparkContext the spark context * @param InputLocation the input location * @param splitter the splitter * @param carryInputData the carry input data * @param newLevel the new level * @param sourceEpsgCRSCode the source epsg CRS code * @param targetEpsgCode the target epsg code */ public PointRDD(JavaSparkContext sparkContext, String InputLocation, FileDataSplitter splitter, boolean carryInputData, StorageLevel newLevel, String sourceEpsgCRSCode, String targetEpsgCode) { this(sparkContext, InputLocation, null, splitter, carryInputData, null, newLevel, sourceEpsgCRSCode, targetEpsgCode); } /** * Instantiates a new point RDD. * * @param sparkContext the spark context * @param InputLocation the input location * @param partitions the partitions * @param userSuppliedMapper the user supplied mapper * @param newLevel the new level * @param sourceEpsgCRSCode the source epsg CRS code * @param targetEpsgCode the target epsg code */ public PointRDD(JavaSparkContext sparkContext, String InputLocation, Integer partitions, FlatMapFunction userSuppliedMapper, StorageLevel newLevel, String sourceEpsgCRSCode, String targetEpsgCode) { this(sparkContext, InputLocation, null, null, false, partitions, newLevel, sourceEpsgCRSCode, targetEpsgCode); } /** * Instantiates a new point RDD. 
* * @param sparkContext the spark context * @param InputLocation the input location * @param userSuppliedMapper the user supplied mapper * @param newLevel the new level * @param sourceEpsgCRSCode the source epsg CRS code * @param targetEpsgCode the target epsg code */ public PointRDD(JavaSparkContext sparkContext, String InputLocation, FlatMapFunction userSuppliedMapper, StorageLevel newLevel, String sourceEpsgCRSCode, String targetEpsgCode) { this(sparkContext, InputLocation, null, null, false, null, newLevel, sourceEpsgCRSCode, targetEpsgCode); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.hazelcast.services.cacheclient;

import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.hazelcast.services.cache.HazelcastCache;
import org.apache.nifi.hazelcast.services.cachemanager.HazelcastCacheManager;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.regex.Pattern;

/**
 * An implementation of DistributedMapCacheClient that uses Hazelcast as the backing cache.
 *
 * Note: By design, the client should not directly depend on Hazelcast specific classes to allow easy version and implementation changes.
 */
@Tags({ "hazelcast", "cache", "map"})
@CapabilityDescription("An implementation of DistributedMapCacheClient that uses Hazelcast as the backing cache. This service relies on " +
        "another controller service, which manages the actual Hazelcast calls, set in Hazelcast Cache Manager.")
public class HazelcastMapCacheClient extends AbstractControllerService implements AtomicDistributedMapCacheClient<Long> {

    public static final PropertyDescriptor HAZELCAST_CACHE_MANAGER = new PropertyDescriptor.Builder()
            .name("hazelcast-cache-manager")
            .displayName("Hazelcast Cache Manager")
            .description("A Hazelcast Cache Manager which manages connections to Hazelcast and provides cache instances.")
            .identifiesControllerService(HazelcastCacheManager.class)
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor HAZELCAST_CACHE_NAME = new PropertyDescriptor.Builder()
            .name("hazelcast-cache-name")
            .displayName("Hazelcast Cache Name")
            .description("The name of a given cache. A Hazelcast cluster may handle multiple independent caches, each identified by a name." +
                    " Clients using caches with the same name are working on the same data structure within Hazelcast.")
            .required(true)
            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final PropertyDescriptor HAZELCAST_ENTRY_TTL = new PropertyDescriptor.Builder()
            .name("hazelcast-entry-ttl")
            .displayName("Hazelcast Entry Lifetime")
            // Fixed missing space ("datawill") and grammar ("will exists") in the user-facing description.
            .description("Indicates how long the written entries should exist in Hazelcast. Setting it to '0 secs' means that the data " +
                    "will exist until its deletion or until the Hazelcast server is shut down.")
            .required(true)
            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
            .defaultValue("0 secs") // Note: in case of Hazelcast IMap, negative value would mean "map default" which might be overridden by a different client.
            .build();

    // Revision assigned to an entry when it is first written; incremented on every successful replace.
    private static final long STARTING_REVISION = 1;

    private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS;

    static {
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.add(HAZELCAST_CACHE_MANAGER);
        properties.add(HAZELCAST_CACHE_NAME);
        properties.add(HAZELCAST_ENTRY_TTL);
        PROPERTY_DESCRIPTORS = Collections.unmodifiableList(properties);
    }

    // Set in onEnabled, cleared in onDisabled; all cache operations go through this reference.
    private volatile HazelcastCache cache = null;

    /**
     * Resolves the cache instance from the configured Hazelcast Cache Manager.
     *
     * @param context provides the configured properties (manager service, cache name, TTL).
     */
    @OnEnabled
    public void onEnabled(final ConfigurationContext context) {
        final HazelcastCacheManager hazelcastCacheManager = context.getProperty(HAZELCAST_CACHE_MANAGER).asControllerService(HazelcastCacheManager.class);
        cache = hazelcastCacheManager.getCache(
                context.getProperty(HAZELCAST_CACHE_NAME).evaluateAttributeExpressions().getValue(),
                context.getProperty(HAZELCAST_ENTRY_TTL).asTimePeriod(TimeUnit.MILLISECONDS));
        getLogger().debug("Enable Hazelcast cache client for cache {}", cache.name());
    }

    @OnDisabled
    public void onDisabled() {
        if (cache != null) {
            // The cache state will be preserved until the Service is not stopped!
            getLogger().debug("Disable Hazelcast cache client for cache {}", cache.name());
            cache = null;
        }
    }

    /**
     * Fetches an entry together with its revision, for use with {@link #replace}.
     *
     * @return the entry, or {@code null} when the key is not present in the cache.
     */
    @Override
    public <K, V> AtomicCacheEntry<K, V, Long> fetch(final K key, final Serializer<K> keySerializer, final Deserializer<V> valueDeserializer) throws IOException {
        final byte[] result = cache.get(getCacheEntryKey(key, keySerializer));
        return (result == null) ? null : new AtomicCacheEntry<>(key, parsePayload(valueDeserializer, result), parseRevision(result));
    }

    /**
     * Optimistic update: succeeds only when the entry's revision still matches the stored
     * revision (or when the entry does not exist yet and the caller holds no revision).
     *
     * @return {@code true} when the entry was added or updated, {@code false} otherwise.
     */
    @Override
    public <K, V> boolean replace(final AtomicCacheEntry<K, V, Long> entry, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
        if (entry.getKey() == null) {
            return false;
        }

        final String key = getCacheEntryKey(entry.getKey(), keySerializer);

        // The lock ensures the get + compare + put sequence is atomic for this key.
        try (final HazelcastCache.HazelcastCacheEntryLock lock = cache.acquireLock(key)) {
            final byte[] oldValue = cache.get(key);

            if (oldValue == null && (!entry.getRevision().isPresent() || entry.getRevision().get() < STARTING_REVISION)) {
                cache.put(key, serialize(entry.getValue(), valueSerializer, STARTING_REVISION));
                getLogger().debug("Entry with key {} was added during replace", key);
                return true;
            } else if (oldValue != null && entry.getRevision().isPresent() && Objects.equals(entry.getRevision().get(), parseRevision(oldValue))) {
                // Compute the new revision once: the original code appended "+ 1" inside a string
                // concatenation, logging e.g. "21" instead of 3, and it also called get() without
                // checking presence.
                final long newRevision = entry.getRevision().get() + 1;
                cache.put(key, serialize(entry.getValue(), valueSerializer, newRevision));
                getLogger().debug("Entry with key {} was updated during replace, with revision {}", key, newRevision);
                return true;
            }
        }

        return false;
    }

    @Override
    public <K, V> boolean putIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
        return cache.putIfAbsent(getCacheEntryKey(key, keySerializer), serialize(value, valueSerializer, STARTING_REVISION)) == null;
    }

    /**
     * @return {@code null} when the value was stored, otherwise the already-present value.
     */
    @Override
    public <K, V> V getAndPutIfAbsent(
            final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer, final Deserializer<V> valueDeserializer) throws IOException {
        final byte[] result = cache.putIfAbsent(getCacheEntryKey(key, keySerializer), serialize(value, valueSerializer, STARTING_REVISION));
        return (result == null) ? null : parsePayload(valueDeserializer, result);
    }

    @Override
    public <K> boolean containsKey(final K key, final Serializer<K> keySerializer) throws IOException {
        return cache.contains(getCacheEntryKey(key, keySerializer));
    }

    @Override
    public <K, V> void put(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
        cache.put(getCacheEntryKey(key, keySerializer), serialize(value, valueSerializer, STARTING_REVISION));
    }

    @Override
    public <K, V> V get(final K key, final Serializer<K> keySerializer, final Deserializer<V> valueDeserializer) throws IOException {
        final byte[] result = cache.get(getCacheEntryKey(key, keySerializer));
        return result == null ? null : parsePayload(valueDeserializer, result);
    }

    @Override
    public <K> boolean remove(final K key, final Serializer<K> keySerializer) throws IOException {
        return cache.remove(getCacheEntryKey(key, keySerializer));
    }

    @Override
    public long removeByPattern(final String regex) throws IOException {
        return cache.removeAll(new RegexPredicate(regex));
    }

    // Serializable because the predicate is shipped to the cache (potentially a remote Hazelcast member).
    private static class RegexPredicate implements Predicate<String>, Serializable {
        private final Pattern pattern;

        private RegexPredicate(final String regex) {
            this.pattern = Pattern.compile(regex);
        }

        @Override
        public boolean test(final String string) {
            return pattern.matcher(string).matches();
        }
    }

    @Override
    public void close() {
        getLogger().debug("Closing {}", getClass().getSimpleName());
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTY_DESCRIPTORS;
    }

    // Stored format is [8-byte big-endian revision][serialized payload]; the two parsers below split it.
    private static long parseRevision(final byte[] value) {
        return ByteBuffer.wrap(Arrays.copyOfRange(value, 0, Long.BYTES)).getLong();
    }

    private static <V> V parsePayload(final Deserializer<V> deserializer, final byte[] value) throws IOException {
        return deserializer.deserialize(Arrays.copyOfRange(value, Long.BYTES, value.length));
    }

    /**
     * Derives the textual cache key for an arbitrary key object. String keys are used as-is;
     * other types are serialized and interpreted as UTF-8 text.
     *
     * @throws IOException when serialization fails or the resulting key is empty.
     */
    private <S> String getCacheEntryKey(final S key, final Serializer<S> serializer) throws IOException {
        final String result;

        if (key instanceof String) {
            result = (String) key;
        } else {
            final ByteArrayOutputStream stream = new ByteArrayOutputStream();
            serializer.serialize(key, stream);
            result = stream.toString("UTF-8");
        }

        if (result.isEmpty()) {
            throw new IOException("Cache record key cannot be empty!");
        }

        return result;
    }

    /**
     * Serializes a value using the given serializer. The first eight bytes of the array contains the revision.
     * The rest holds the actual serialized value.
     *
     * @param value The value to serialize.
     * @param serializer The serializer to use in order to serialize the incoming value.
     * @param version The version of the entry.
     * @param <S> The type of the value to be serialized.
     *
     * @return Byte array containing both version and value of the cache entry.
     *
     * @throws IOException In case of any issue during working with intermediate byte stream.
     */
    private <S> byte[] serialize(final S value, final Serializer<S> serializer, final long version) throws IOException {
        final ByteArrayOutputStream stream = new ByteArrayOutputStream();
        stream.write(getVersionByteArray(version));
        serializer.serialize(value, stream);
        return stream.toByteArray();
    }

    private byte[] getVersionByteArray(final long version) {
        return ByteBuffer.allocate(Long.BYTES).putLong(version).array();
    }
}
/**
 * Copyright (c) 2003 The Apereo Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *             http://opensource.org/licenses/ecl2
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sakaiproject.contentreview.service;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.SortedSet;
import java.util.stream.Collectors;

import lombok.extern.slf4j.Slf4j;

import org.apache.commons.lang.StringUtils;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.contentreview.dao.ContentReviewItem;
import org.sakaiproject.contentreview.exception.ContentReviewProviderException;
import org.sakaiproject.contentreview.exception.QueueException;
import org.sakaiproject.contentreview.exception.ReportException;
import org.sakaiproject.contentreview.exception.SubmissionException;
import org.sakaiproject.contentreview.exception.TransientSubmissionException;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.tool.api.ToolManager;

import lombok.Setter;

/* This class is passed a list of providers in the bean as references, it will use the first
 * by default unless overridden by a site property.
 */
@Slf4j
public class ContentReviewFederatedServiceImpl implements ContentReviewService {

    @Setter
    private ServerConfigurationService serverConfigurationService;

    @Setter
    private ToolManager toolManager;

    @Setter
    private SiteService siteService;

    @Setter
    private List<ContentReviewService> providers;

    // Service name of the provider used when no site-level override applies.
    private String defaultProvider;

    // Service names of the providers enabled via "contentreview.enabledProviders".
    private List<String> enabledProviders;

    /**
     * Resolves the enabled providers from configuration, falling back to a no-op provider
     * when none is configured, then picks the default provider (configured value if enabled,
     * otherwise the first enabled provider).
     */
    public void init() {
        enabledProviders = configureEnabledProviders();

        if (enabledProviders.isEmpty()) {
            ContentReviewService noop = new NoOpContentReviewService();
            providers.add(noop);
            enabledProviders.add(noop.getServiceName());
        }

        // Parameterized logging instead of string concatenation (SLF4J idiom).
        providers.forEach(p -> log.debug("Found Content Review Provider: {} with providerId of {}", p.getServiceName(), p.getProviderId()));
        enabledProviders.forEach(p -> log.info("Enabled Content Review Provider: {} with providerId of {}", p, Math.abs(p.hashCode())));

        Optional<String> configuredDefaultProvider = Optional.ofNullable(serverConfigurationService.getString("contentreview.defaultProvider"));
        if (configuredDefaultProvider.isPresent()) {
            String cdp = configuredDefaultProvider.get();
            if (enabledProviders.contains(cdp)) {
                defaultProvider = cdp;
                log.info("Default Content Review Provider: {} with providerId of {}", defaultProvider, Math.abs(defaultProvider.hashCode()));
            }
        }

        if (StringUtils.isBlank(defaultProvider)) {
            // set the default provider to the first provider in the list
            defaultProvider = enabledProviders.get(0);
            log.info("Default Content Review Provider: {} with providerId of {}", defaultProvider, Math.abs(defaultProvider.hashCode()));
        }
    }

    /**
     * @return the current site, or empty when called outside the portal (no current placement).
     */
    private Optional<Site> getCurrentSite() {
        // Never initialize an Optional to null; return directly from each path instead.
        try {
            String context = toolManager.getCurrentPlacement().getContext();
            return Optional.of(siteService.getSite(context));
        } catch (Exception e) {
            // sakai failed to get us a location so we can assume we are not inside the portal
            return Optional.empty();
        }
    }

    /**
     * @return the registered providers whose names appear in "contentreview.enabledProviders",
     *         or an empty list when the property is not set.
     */
    private List<String> configureEnabledProviders() {
        Optional<String[]> configuredProviders = Optional.ofNullable(serverConfigurationService.getStrings("contentreview.enabledProviders"));
        if (configuredProviders.isPresent()) {
            List<String> configProviders = Arrays.asList(configuredProviders.get());
            return providers.stream()
                    .filter(crs -> configProviders.contains(crs.getServiceName()))
                    .map(ContentReviewService::getServiceName)
                    .collect(Collectors.toList());
        }
        return new ArrayList<>();
    }

    /**
     * Picks the provider to delegate to: the site-level "contentreview.provider" override
     * when it names an enabled provider, otherwise the default provider.
     *
     * @throws ContentReviewProviderException when no provider can be resolved.
     */
    private ContentReviewService getSelectedProvider() {
        if (StringUtils.isBlank(defaultProvider)) {
            throw new ContentReviewProviderException("No Default Content Review Provider");
        }

        Optional<Site> currentSite = getCurrentSite();
        if (currentSite.isPresent()) {
            log.debug("In Location: {}", currentSite.get().getReference());
            final String overrideProvider = currentSite.get().getProperties().getProperty("contentreview.provider");
            if (enabledProviders.contains(overrideProvider)) {
                return providers.stream()
                        .filter(crs -> crs.getServiceName().equals(overrideProvider))
                        .findFirst()
                        .orElseThrow(() -> new ContentReviewProviderException("Override provider " + overrideProvider + " is enabled but not registered"));
            }
        }
        return providers.stream()
                .filter(crs -> crs.getServiceName().equals(defaultProvider))
                .findFirst()
                .orElseThrow(() -> new ContentReviewProviderException("Default provider " + defaultProvider + " is not registered"));
    }

    // --- Delegating methods: each forwards to the provider selected for the current site. ---

    public boolean allowResubmission() {
        return getSelectedProvider().allowResubmission();
    }

    public void checkForReports() {
        // this is a method that the jobs call and should check for reports for all enabled providers
        providers.stream().filter(provider -> enabledProviders.contains(provider.getServiceName())).forEach(ContentReviewService::checkForReports);
    }

    public void createAssignment(String arg0, String arg1, Map arg2) throws SubmissionException, TransientSubmissionException {
        getSelectedProvider().createAssignment(arg0, arg1, arg2);
    }

    public List<ContentReviewItem> getAllContentReviewItems(String arg0, String arg1) throws QueueException, SubmissionException, ReportException {
        return getSelectedProvider().getAllContentReviewItems(arg0, arg1);
    }

    public Map getAssignment(String arg0, String arg1) throws SubmissionException, TransientSubmissionException {
        return getSelectedProvider().getAssignment(arg0, arg1);
    }

    public Date getDateQueued(String arg0) throws QueueException {
        return getSelectedProvider().getDateQueued(arg0);
    }

    public Date getDateSubmitted(String arg0) throws QueueException, SubmissionException {
        return getSelectedProvider().getDateSubmitted(arg0);
    }

    public String getIconUrlforScore(Long score) {
        return getSelectedProvider().getIconUrlforScore(score);
    }

    public String getLocalizedStatusMessage(String arg0) {
        return getSelectedProvider().getLocalizedStatusMessage(arg0);
    }

    public String getLocalizedStatusMessage(String arg0, String arg1) {
        return getSelectedProvider().getLocalizedStatusMessage(arg0, arg1);
    }

    public String getLocalizedStatusMessage(String arg0, Locale arg1) {
        return getSelectedProvider().getLocalizedStatusMessage(arg0, arg1);
    }

    public List<ContentReviewItem> getReportList(String siteId) throws QueueException, SubmissionException, ReportException {
        return getSelectedProvider().getReportList(siteId);
    }

    public List<ContentReviewItem> getReportList(String siteId, String taskId) throws QueueException, SubmissionException, ReportException {
        return getSelectedProvider().getReportList(siteId, taskId);
    }

    public String getReviewReport(String contentId, String assignmentRef, String userId) throws QueueException, ReportException {
        return getSelectedProvider().getReviewReport(contentId, assignmentRef, userId);
    }

    public String getReviewReportInstructor(String contentId, String assignmentRef, String userId) throws QueueException, ReportException {
        return getSelectedProvider().getReviewReportInstructor(contentId, assignmentRef, userId);
    }

    public String getReviewReportStudent(String contentId, String assignmentRef, String userId) throws QueueException, ReportException {
        return getSelectedProvider().getReviewReportStudent(contentId, assignmentRef, userId);
    }

    public Long getReviewStatus(String contentId) throws QueueException {
        return getSelectedProvider().getReviewStatus(contentId);
    }

    public String getServiceName() {
        return getSelectedProvider().getServiceName();
    }

    public boolean allowAllContent() {
        return getSelectedProvider().allowAllContent();
    }

    public boolean isAcceptableContent(ContentResource arg0) {
        return getSelectedProvider().isAcceptableContent(arg0);
    }

    public Map<String, SortedSet<String>> getAcceptableExtensionsToMimeTypes() {
        return getSelectedProvider().getAcceptableExtensionsToMimeTypes();
    }

    public Map<String, SortedSet<String>> getAcceptableFileTypesToExtensions() {
        return getSelectedProvider().getAcceptableFileTypesToExtensions();
    }

    public boolean isSiteAcceptable(Site arg0) {
        return getSelectedProvider().isSiteAcceptable(arg0);
    }

    public void processQueue() {
        // this is a method that the jobs call and should process items for all enabled providers
        providers.stream().filter(provider -> enabledProviders.contains(provider.getServiceName())).forEach(ContentReviewService::processQueue);
    }

    public void queueContent(String userId, String siteId, String assignmentReference, List<ContentResource> content) throws QueueException {
        getSelectedProvider().queueContent(userId, siteId, assignmentReference, content);
    }

    public void removeFromQueue(String arg0) {
        getSelectedProvider().removeFromQueue(arg0);
    }

    public void resetUserDetailsLockedItems(String arg0) {
        getSelectedProvider().resetUserDetailsLockedItems(arg0);
    }

    public String getReviewError(String contentId) {
        return getSelectedProvider().getReviewError(contentId);
    }

    public int getReviewScore(String contentId, String assignmentRef, String userId) throws QueueException, ReportException, Exception {
        return getSelectedProvider().getReviewScore(contentId, assignmentRef, userId);
    }
}
/*
 * Copyright (c) 2008-2016 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.haulmont.cuba.gui.components.actions;

import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.AppBeans;
import com.haulmont.cuba.core.global.ExtendedEntities;
import com.haulmont.cuba.core.global.Messages;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.gui.components.Action;
import com.haulmont.cuba.gui.components.Component;
import com.haulmont.cuba.gui.components.ListComponent;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.Datasource;
import com.haulmont.cuba.gui.data.NestedDatasource;
import com.haulmont.cuba.gui.data.PropertyDatasource;
import com.haulmont.cuba.gui.icons.CubaIcon;
import com.haulmont.cuba.gui.icons.Icons;
import com.haulmont.cuba.security.entity.EntityAttrAccess;
import org.springframework.context.annotation.Scope;

import java.util.Set;

/**
 * The <code>RemoveAction</code> variant that excludes instances from the list, but doesn't delete them from DB.
 * <p>
 * In order to provide your own implementation globally, create a subclass and register it in {@code web-spring.xml},
 * for example:
 * <pre>
 * &lt;bean id="cuba_ExcludeAction" class="com.company.sample.gui.MyExcludeAction" scope="prototype"/&gt;
 * </pre>
 * Also, use {@code create()} static methods instead of constructors when creating the action programmatically.
 */
@org.springframework.stereotype.Component("cuba_ExcludeAction")
@Scope("prototype")
public class ExcludeAction extends RemoveAction implements Action.DisabledWhenScreenReadOnly {

    public static final String ACTION_ID = ListActionType.EXCLUDE.getId();

    // Resolved from AppBeans in the constructor; used in doRemove() to look up
    // effective (extended) entity classes when clearing the inverse reference.
    protected Metadata metadata;

    /**
     * Creates an action with default id. Autocommit and Confirm properties are set to false.
     * @param target component containing this action
     */
    public static ExcludeAction create(ListComponent target) {
        return AppBeans.getPrototype("cuba_ExcludeAction", target);
    }

    /**
     * Creates an action with default id.
     * @param target component containing this action
     * @param autocommit whether to commit datasource immediately
     * @param confirm whether to show the confirmation dialog to user
     */
    public static ExcludeAction create(ListComponent target, boolean autocommit, boolean confirm) {
        return AppBeans.getPrototype("cuba_ExcludeAction", target, autocommit, confirm);
    }

    /**
     * Creates an action with the given id.
     * @param target component containing this action
     * @param autocommit whether to commit datasource immediately
     * @param confirm whether to show the confirmation dialog to user
     * @param id action's name
     */
    public static ExcludeAction create(ListComponent target, boolean autocommit, boolean confirm, String id) {
        return AppBeans.getPrototype("cuba_ExcludeAction", target, autocommit, confirm, id);
    }

    /**
     * The simplest constructor. Autocommit and Confirm properties are set to false, the action has default name.
     * @param target component containing this action
     */
    public ExcludeAction(ListComponent target) {
        this(target, false, false, ACTION_ID);
    }

    /**
     * Constructor that allows to specify autocommit and confirm value. The action has default name.
     * @param target component containing this action
     * @param autocommit whether to commit datasource immediately
     * @param confirm whether to show the confirmation dialog to user
     */
    public ExcludeAction(ListComponent target, boolean autocommit, boolean confirm) {
        this(target, autocommit, confirm, ACTION_ID);
    }

    /**
     * Constructor that allows to specify all parameters.
     * @param target component containing this action
     * @param autocommit whether to commit datasource immediately
     * @param confirm whether to show the confirmation dialog to user
     * @param id action's name
     */
    public ExcludeAction(ListComponent target, boolean autocommit, boolean confirm, String id) {
        super(target, autocommit, id);
        this.confirm = confirm;
        // Caption, metadata and icon are resolved from the application context here,
        // after the superclass has initialized the action.
        Messages messages = AppBeans.get(Messages.NAME);
        this.caption = messages.getMainMessage("actions.Exclude");
        this.metadata = AppBeans.get(Metadata.NAME);
        this.icon = AppBeans.get(Icons.class).get(CubaIcon.EXCLUDE_ACTION);
    }

    /**
     * For a property (nested) datasource, excluding an item modifies the master entity's
     * collection attribute, so the user must have MODIFY access to that attribute.
     * For any other datasource type the action is always permitted.
     */
    @Override
    protected boolean checkRemovePermission() {
        CollectionDatasource ds = target.getDatasource();
        if (ds instanceof PropertyDatasource) {
            PropertyDatasource propertyDatasource = (PropertyDatasource) ds;

            MetaClass parentMetaClass = propertyDatasource.getMaster().getMetaClass();
            MetaProperty metaProperty = propertyDatasource.getProperty();

            // 'security' is inherited from the superclass hierarchy — TODO confirm its declaration site.
            boolean attrPermitted = security.isEntityAttrPermitted(parentMetaClass, metaProperty.getName(), EntityAttrAccess.MODIFY);
            if (!attrPermitted) {
                return false;
            }
        }
        return true;
    }

    /**
     * Excludes the selected items, either directly or after user confirmation,
     * honoring the before/after handlers configured on the action.
     */
    @Override
    public void actionPerform(Component component) {
        if (!isEnabled())
            return;

        // Give the registered handler a chance to veto the action.
        if (beforeActionPerformedHandler != null) {
            if (!beforeActionPerformedHandler.beforeActionPerformed())
                return;
        }

        @SuppressWarnings("unchecked")
        Set<Entity> selected = target.getSelected();
        if (!selected.isEmpty()) {
            if (confirm) {
                // confirmAndRemove() shows the dialog and calls doRemove() on confirmation.
                confirmAndRemove(selected);
            } else {
                doRemove(selected, autocommit);
                afterRemove(selected);
                if (afterRemoveHandler != null) {
                    afterRemoveHandler.handle(selected);
                }
            }
        }
    }

    /**
     * Removes the items from the datasource without deleting them from the database.
     * For nested datasources, first clears the back-reference to the master entity
     * (when the inverse property's effective class is compatible with the datasource's
     * entity class), then marks each item modified and excludes it.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void doRemove(Set<Entity> selected, boolean autocommit) {
        CollectionDatasource ds = target.getDatasource();
        if (ds instanceof NestedDatasource) {
            // Clear reference to master entity
            Datasource masterDs = ((NestedDatasource) ds).getMaster();
            MetaProperty metaProperty = ((NestedDatasource) ds).getProperty();

            if (masterDs != null && metaProperty != null) {
                MetaProperty inverseProp = metaProperty.getInverse();
                if (inverseProp != null) {
                    // Compare effective classes so entity extension (replacement classes) is honored.
                    ExtendedEntities extendedEntities = metadata.getExtendedEntities();

                    Class inversePropClass = extendedEntities.getEffectiveClass(inverseProp.getDomain());
                    Class dsClass = extendedEntities.getEffectiveClass(ds.getMetaClass());

                    if (inversePropClass.isAssignableFrom(dsClass)) {
                        for (Entity item : selected) {
                            item.setValue(inverseProp.getName(), null);
                        }
                    }
                }
            }
        }
        // Order matters: the item must be marked modified before it is excluded,
        // so the cleared reference is committed.
        for (Entity item : selected) {
            ds.modifyItem(item);
            ds.excludeItem(item);
        }

        if (autocommit && (ds.getCommitMode() != Datasource.CommitMode.PARENT)) {
            try {
                ds.commit();
            } catch (RuntimeException e) {
                // Roll the datasource back to a consistent state before propagating.
                ds.refresh();
                throw e;
            }
        }
    }
}
package org.apache.rya.indexing.external; import java.net.UnknownHostException; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.List; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.client.TableExistsException; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.client.mock.MockInstance; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.rya.indexing.pcj.storage.PcjException; import org.apache.rya.indexing.pcj.storage.accumulo.PcjVarOrderFactory; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.openrdf.model.URI; import org.openrdf.model.impl.LiteralImpl; import org.openrdf.model.impl.URIImpl; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.query.BindingSet; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryEvaluationException; import org.openrdf.query.QueryLanguage; import org.openrdf.query.QueryResultHandlerException; import 
org.openrdf.query.TupleQueryResultHandler; import org.openrdf.query.TupleQueryResultHandlerException; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.sail.SailRepository; import org.openrdf.repository.sail.SailRepositoryConnection; import org.openrdf.sail.SailException; import com.google.common.base.Optional; import org.apache.rya.api.persist.RyaDAOException; import org.apache.rya.rdftriplestore.inference.InferenceEngineException; public class PrecompJoinOptimizerIntegrationTest { private SailRepositoryConnection conn, pcjConn; private SailRepository repo, pcjRepo; private Connector accCon; String tablePrefix = "table_"; URI sub, sub2, obj, obj2, subclass, subclass2, talksTo; @Before public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException, RyaDAOException, TableNotFoundException, InferenceEngineException, NumberFormatException, UnknownHostException, SailException { repo = PcjIntegrationTestingUtil.getNonPcjRepo(tablePrefix, "instance"); conn = repo.getConnection(); pcjRepo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance"); pcjConn = pcjRepo.getConnection(); sub = new URIImpl("uri:entity"); subclass = new URIImpl("uri:class"); obj = new URIImpl("uri:obj"); talksTo = new URIImpl("uri:talksTo"); conn.add(sub, RDF.TYPE, subclass); conn.add(sub, RDFS.LABEL, new LiteralImpl("label")); conn.add(sub, talksTo, obj); sub2 = new URIImpl("uri:entity2"); subclass2 = new URIImpl("uri:class2"); obj2 = new URIImpl("uri:obj2"); conn.add(sub2, RDF.TYPE, subclass2); conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2")); conn.add(sub2, talksTo, obj2); accCon = new MockInstance("instance").getConnector("root", new PasswordToken("")); } @After public void close() throws RepositoryException, AccumuloException, AccumuloSecurityException, TableNotFoundException { 
PcjIntegrationTestingUtil.closeAndShutdown(conn, repo); PcjIntegrationTestingUtil.closeAndShutdown(pcjConn, pcjRepo); PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix); PcjIntegrationTestingUtil.deleteIndexTables(accCon, 2, tablePrefix); } @Test public void testEvaluateSingeIndex() throws TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException, PcjException, InferenceEngineException, NumberFormatException, UnknownHostException { final String indexSparqlString = ""// + "SELECT ?e ?l ?c " // + "{" // + " ?e a ?c . "// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "// + "}";// PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix + "INDEX_1", indexSparqlString, new String[] { "e", "l", "c" }, Optional.<PcjVarOrderFactory> absent()); final String queryString = ""// + "SELECT ?e ?c ?l ?o " // + "{" // + " ?e a ?c . "// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "// + " ?e <uri:talksTo> ?o . 
    "//
            + "}";//

        // Evaluate the query against the PCJ tables after the core Rya tables
        // have been removed, then re-add the statements and expect two results.
        final CountingResultHandler crh = new CountingResultHandler();
        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub, talksTo, obj);
        conn.add(sub2, talksTo, obj2);
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh);
        Assert.assertEquals(2, crh.getCount());
    }

    // Builds two PCJ indexes with different variable orders and checks that the
    // query evaluated through the PCJ connection still yields both solutions.
    @Test
    public void testEvaluateTwoIndexTwoVarOrder1() throws AccumuloException,
            AccumuloSecurityException, TableExistsException,
            RepositoryException, MalformedQueryException, SailException,
            QueryEvaluationException, TableNotFoundException,
            TupleQueryResultHandlerException, RyaDAOException, PcjException {

        conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
        conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));

        final String indexSparqlString = ""//
                + "SELECT ?e ?l ?c " //
                + "{" //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String indexSparqlString2 = ""//
                + "SELECT ?e ?o ?l " //
                + "{" //
                + " ?e <uri:talksTo> ?o . "//
                + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String queryString = ""//
                + "SELECT ?e ?c ?l ?o " //
                + "{" //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                + " ?e <uri:talksTo> ?o . "//
                + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        // INDEX_1 stores (e, l, c); INDEX_2 stores (e, l, o).
        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_1", indexSparqlString, new String[] { "e", "l", "c" },
                Optional.<PcjVarOrderFactory> absent());

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_2", indexSparqlString2, new String[] { "e", "l", "o" },
                Optional.<PcjVarOrderFactory> absent());

        final CountingResultHandler crh = new CountingResultHandler();
        // Remove the core tables so results must come from the PCJ indexes.
        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh);
        Assert.assertEquals(2, crh.getCount());
    }

    // Single PCJ index whose SPARQL includes a Filter on ?e; the query reusing
    // that filter should match exactly one entity.
    @Test
    public void testEvaluateSingeFilterIndex()
            throws TupleQueryResultHandlerException, QueryEvaluationException,
            MalformedQueryException, RepositoryException, AccumuloException,
            AccumuloSecurityException, TableExistsException, RyaDAOException,
            SailException, TableNotFoundException, PcjException,
            InferenceEngineException, NumberFormatException,
            UnknownHostException {

        final String indexSparqlString = ""//
                + "SELECT ?e ?l ?c " //
                + "{" //
                + " Filter(?e = <uri:entity>) " //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_1", indexSparqlString, new String[] { "e", "l", "c" },
                Optional.<PcjVarOrderFactory> absent());

        final String queryString = ""//
                + "SELECT ?e ?c ?l ?o " //
                + "{" //
                + " Filter(?e = <uri:entity>) " //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                + " ?e <uri:talksTo> ?o . "//
                + "}";//

        final CountingResultHandler crh = new CountingResultHandler();
        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub, talksTo, obj);
        conn.add(sub2, talksTo, obj2);
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(
                crh);
        Assert.assertEquals(1, crh.getCount());
    }

    // Filtered PCJ index combined with a UNION in the query; expects a single
    // solution where the filter on ?l = "label2" holds.
    @Test
    public void testEvaluateSingeFilterWithUnion()
            throws TupleQueryResultHandlerException, QueryEvaluationException,
            MalformedQueryException, RepositoryException, AccumuloException,
            AccumuloSecurityException, TableExistsException, RyaDAOException,
            SailException, TableNotFoundException, PcjException,
            InferenceEngineException, NumberFormatException,
            UnknownHostException {

        final String indexSparqlString2 = ""//
                + "SELECT ?e ?l ?c " //
                + "{" //
                + " Filter(?l = \"label2\") " //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_2", indexSparqlString2, new String[] { "e", "l", "c" },
                Optional.<PcjVarOrderFactory> absent());

        final String queryString = ""//
                + "SELECT ?e ?c ?o ?m ?l" //
                + "{" //
                + " Filter(?l = \"label2\") " //
                + " ?e <uri:talksTo> ?o . "//
                + " { ?e a ?c . ?e <http://www.w3.org/2000/01/rdf-schema#label> ?m }"//
                + " UNION { ?e a ?c . ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l }"//
                + "}";//

        final CountingResultHandler crh = new CountingResultHandler();
        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub, talksTo, obj);
        conn.add(sub2, talksTo, obj2);
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(
                crh);
        Assert.assertEquals(1, crh.getCount());
    }

    // Filtered PCJ index combined with an OPTIONAL clause; a third entity with
    // "label3" is added so exactly one solution survives the filter.
    @Test
    public void testEvaluateSingeFilterWithLeftJoin()
            throws TupleQueryResultHandlerException, QueryEvaluationException,
            MalformedQueryException, RepositoryException, AccumuloException,
            AccumuloSecurityException, TableExistsException, RyaDAOException,
            SailException, TableNotFoundException, PcjException,
            InferenceEngineException, NumberFormatException,
            UnknownHostException {

        final String indexSparqlString1 = ""//
                + "SELECT ?e ?l ?c " //
                + "{" //
                + " Filter(?l = \"label3\") " //
                + " ?e a ?c . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final URI sub3 = new URIImpl("uri:entity3");
        final URI subclass3 = new URIImpl("uri:class3");
        conn.add(sub3, RDF.TYPE, subclass3);
        conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3"));

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_1", indexSparqlString1, new String[] { "e", "l", "c" },
                Optional.<PcjVarOrderFactory> absent());

        final String queryString = ""//
                + "SELECT ?e ?c ?o ?m ?l" //
                + "{" //
                + " Filter(?l = \"label3\") " //
                + " ?e a ?c . " //
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " //
                + " OPTIONAL { ?e <uri:talksTo> ?o . ?e <http://www.w3.org/2000/01/rdf-schema#label> ?m }"//
                + "}";//

        final CountingResultHandler crh = new CountingResultHandler();
        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub, talksTo, obj);
        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(
                crh);
        Assert.assertEquals(1, crh.getCount());
    }

    // Two filtered PCJ indexes feeding a UNION query; after re-adding type
    // statements the evaluation is expected to produce six solutions.
    @Test
    public void testEvaluateTwoIndexUnionFilter() throws AccumuloException,
            AccumuloSecurityException, TableExistsException,
            RepositoryException, MalformedQueryException, SailException,
            QueryEvaluationException, TableNotFoundException,
            TupleQueryResultHandlerException, RyaDAOException, PcjException,
            InferenceEngineException, NumberFormatException,
            UnknownHostException {

        conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
        conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
        conn.add(sub, RDF.TYPE, obj);
        conn.add(sub2, RDF.TYPE, obj2);

        final String indexSparqlString = ""//
                + "SELECT ?e ?l ?o " //
                + "{" //
                + " Filter(?l = \"label2\") " //
                + " ?e a ?o . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String indexSparqlString2 = ""//
                + "SELECT ?e ?l ?o " //
                + "{" //
                + " Filter(?l = \"label2\") " //
                + " ?e <uri:talksTo> ?o . "//
                + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String queryString = ""//
                + "SELECT ?c ?e ?l ?o " //
                + "{" //
                + " Filter(?l = \"label2\") " //
                + " ?e a ?c . "//
                + " { ?e a ?o . ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l }"//
                + " UNION { ?e <uri:talksTo> ?o . ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l }"//
                + "}";//

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_1", indexSparqlString, new String[] { "e", "l", "o" },
                Optional.<PcjVarOrderFactory> absent());

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_2", indexSparqlString2, new String[] { "e", "l", "o" },
                Optional.<PcjVarOrderFactory> absent());

        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub2, RDF.TYPE, subclass2);
        conn.add(sub2, RDF.TYPE, obj2);
        final CountingResultHandler crh = new CountingResultHandler();
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(
                crh);
        Assert.assertEquals(6, crh.getCount());
    }

    // Two PCJ indexes under an OPTIONAL{ ... UNION ... } with a filter on the
    // livesIn object; expects six solutions after the core tables are rebuilt.
    @Test
    public void testEvaluateTwoIndexLeftJoinUnionFilter()
            throws AccumuloException, AccumuloSecurityException,
            TableExistsException, RepositoryException, MalformedQueryException,
            SailException, QueryEvaluationException, TableNotFoundException,
            TupleQueryResultHandlerException, RyaDAOException, PcjException,
            InferenceEngineException, NumberFormatException,
            UnknownHostException {

        conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
        conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
        conn.add(sub, RDF.TYPE, obj);
        conn.add(sub2, RDF.TYPE, obj2);
        final URI livesIn = new URIImpl("uri:livesIn");
        final URI city = new URIImpl("uri:city");
        final URI city2 = new URIImpl("uri:city2");
        final URI city3 = new URIImpl("uri:city3");
        conn.add(sub, livesIn, city);
        conn.add(sub2, livesIn, city2);
        conn.add(sub2, livesIn, city3);
        conn.add(sub, livesIn, city3);

        final String indexSparqlString = ""//
                + "SELECT ?e ?l ?o " //
                + "{" //
                + " ?e a ?o . "//
                + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String indexSparqlString2 = ""//
                + "SELECT ?e ?l ?o " //
                + "{" //
                + " ?e <uri:talksTo> ?o . "//
                + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
                + "}";//

        final String queryString = ""//
                + "SELECT ?c ?e ?l ?o " //
                + "{" //
                + " Filter(?c = <uri:city3>) " //
                + " ?e <uri:livesIn> ?c . "//
                + " OPTIONAL{{ ?e a ?o . ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l }"//
                + " UNION { ?e <uri:talksTo> ?o . ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l }}"//
                + "}";//

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_1", indexSparqlString, new String[] { "e", "l", "o" },
                Optional.<PcjVarOrderFactory> absent());

        PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix
                + "INDEX_2", indexSparqlString2, new String[] { "e", "l", "o" },
                Optional.<PcjVarOrderFactory> absent());

        PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
        PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
        repo = PcjIntegrationTestingUtil.getPcjRepo(tablePrefix, "instance");
        conn = repo.getConnection();
        conn.add(sub2, livesIn, city3);
        conn.add(sub, livesIn, city3);
        final CountingResultHandler crh = new CountingResultHandler();
        pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(
                crh);
        Assert.assertEquals(6, crh.getCount());
    }

    /**
     * Tuple-result handler that simply counts the solutions it receives.
     * Used by the tests above to assert how many bindings a query produced.
     */
    public static class CountingResultHandler implements
            TupleQueryResultHandler {
        // Number of solutions seen so far.
        private int count = 0;

        public int getCount() {
            return count;
        }

        public void resetCount() {
            count = 0;
        }

        @Override
        public void startQueryResult(final List<String> arg0)
                throws TupleQueryResultHandlerException {
        }

        @Override
        public void handleSolution(final BindingSet arg0)
                throws TupleQueryResultHandlerException {
            // Logs each solution and the running total (test diagnostics).
            System.out.println(arg0);
            count++;
            System.out.println("Count is " + count);
        }

        @Override
        public void endQueryResult() throws TupleQueryResultHandlerException {
        }

        @Override
        public void handleBoolean(final boolean arg0)
                throws QueryResultHandlerException {
            // TODO Auto-generated method stub
        }

        @Override
        public void handleLinks(final List<String> arg0)
                throws QueryResultHandlerException {
            // TODO Auto-generated method stub
        }
    }
}
/* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.adejanovski.cassandra.jdbc; import java.sql.*; import java.util.*; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.Host; import com.datastax.driver.core.Metadata; import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.ProtocolOptions.Compression; import com.datastax.driver.core.Session; import com.datastax.driver.core.UserType; import com.google.common.collect.Maps; import static com.github.adejanovski.cassandra.jdbc.CassandraResultSet.*; import static com.github.adejanovski.cassandra.jdbc.Utils.*; /** * Implementation class for {@link Connection}. 
*/ public class CassandraConnection extends AbstractConnection implements Connection { private static final Logger logger = LoggerFactory.getLogger(CassandraConnection.class); public static Integer roundRobinIndex; static final String IS_VALID_CQLQUERY_2_0_0 = "SELECT COUNT(1) FROM system.Versions WHERE component = 'cql';"; static final String IS_VALID_CQLQUERY_3_0_0 = "SELECT COUNT(1) FROM system.\"Versions\" WHERE component = 'cql';"; public static volatile int DB_MAJOR_VERSION = 1; public static volatile int DB_MINOR_VERSION = 2; public static volatile int DB_REVISION = 2; public static final String DB_PRODUCT_NAME = "Cassandra"; public static final String DEFAULT_CQL_VERSION = "3.0.0"; public ConcurrentMap<String, CassandraPreparedStatement> preparedStatements = Maps.newConcurrentMap(); public static Compression defaultCompression = Compression.LZ4; private final boolean autoCommit = true; private final int transactionIsolation = Connection.TRANSACTION_NONE; private final SessionHolder sessionHolder; /** * Connection Properties */ private Properties connectionProps; /** * Client Info Properties (currently unused) */ private Properties clientInfo = new Properties(); /** * Set of all Statements that have been created by this connection */ private Set<Statement> statements = new ConcurrentSkipListSet<Statement>(); private Session cSession; protected long timeOfLastFailure = 0; protected int numFailures = 0; protected String username = null; protected String url = null; public String cluster; protected String currentKeyspace; protected TreeSet<String> hostListPrimary; protected TreeSet<String> hostListBackup; int majorCqlVersion; private Metadata metadata; public boolean debugMode; private volatile boolean isClosed; PreparedStatement isAlive = null; //private String currentCqlVersion; public ConsistencyLevel defaultConsistencyLevel; /** * Instantiates a new CassandraConnection. 
* @param sessionHolder * @throws SQLException */ public CassandraConnection(SessionHolder sessionHolder) throws SQLException { this.sessionHolder = sessionHolder; Properties props = sessionHolder.properties; debugMode = props.getProperty(TAG_DEBUG, "").equals("true"); hostListPrimary = new TreeSet<String>(); hostListBackup = new TreeSet<String>(); connectionProps = (Properties)props.clone(); clientInfo = new Properties(); url = PROTOCOL + createSubName(props); currentKeyspace = props.getProperty(TAG_DATABASE_NAME); username = props.getProperty(TAG_USER, ""); String version = props.getProperty(TAG_CQL_VERSION, DEFAULT_CQL_VERSION); connectionProps.setProperty(TAG_ACTIVE_CQL_VERSION, version); majorCqlVersion = getMajor(version); defaultConsistencyLevel = ConsistencyLevel.valueOf(props.getProperty(TAG_CONSISTENCY_LEVEL, ConsistencyLevel.ONE.name())); cSession = sessionHolder.session; metadata = cSession.getCluster().getMetadata(); logger.info("Connected to cluster: %s\n", metadata.getClusterName()); for (Host aHost : metadata.getAllHosts()) { logger.info("Datacenter: %s; Host: %s; Rack: %s\n", aHost.getDatacenter(), aHost.getAddress(), aHost.getRack()); } Iterator<Host> hosts = metadata.getAllHosts().iterator(); if (hosts.hasNext()) { Host firstHost = hosts.next(); // TODO this is shared among all Connections, what if they belong to different clusters? 
CassandraConnection.DB_MAJOR_VERSION = firstHost.getCassandraVersion().getMajor(); CassandraConnection.DB_MINOR_VERSION = firstHost.getCassandraVersion().getMinor(); CassandraConnection.DB_REVISION = firstHost.getCassandraVersion().getPatch(); } } // get the Major portion of a string like : Major.minor.patch where 2 is the default @SuppressWarnings("boxing") private final int getMajor(String version) { int major = 0; String[] parts = version.split("\\."); try { major = Integer.valueOf(parts[0]); } catch (Exception e) { major = 2; } return major; } private final void checkNotClosed() throws SQLException { if (isClosed()) throw new SQLNonTransientConnectionException(WAS_CLOSED_CON); } public void clearWarnings() throws SQLException { // This implementation does not support the collection of warnings so clearing is a no-op // but it is still an exception to call this on a closed connection. checkNotClosed(); } /** * On close of connection. */ public void close() throws SQLException { sessionHolder.release(); isClosed = true; } public void commit() throws SQLException { checkNotClosed(); //throw new SQLFeatureNotSupportedException(ALWAYS_AUTOCOMMIT); } public java.sql.Statement createStatement() throws SQLException { checkNotClosed(); Statement statement = new CassandraStatement(this); statements.add(statement); return statement; } public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { checkNotClosed(); Statement statement = new CassandraStatement(this, null, resultSetType, resultSetConcurrency); statements.add(statement); return statement; } public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkNotClosed(); Statement statement = new CassandraStatement(this, null, resultSetType, resultSetConcurrency, resultSetHoldability); statements.add(statement); return statement; } public boolean getAutoCommit() throws SQLException { checkNotClosed(); return 
autoCommit; } public Properties getConnectionProps() { return connectionProps; } public String getCatalog() throws SQLException { checkNotClosed(); return metadata.getClusterName(); } public void setSchema(String schema) throws SQLException { checkNotClosed(); currentKeyspace = schema; } public String getSchema() throws SQLException { checkNotClosed(); return currentKeyspace; } public Properties getClientInfo() throws SQLException { checkNotClosed(); return clientInfo; } public String getClientInfo(String label) throws SQLException { checkNotClosed(); return clientInfo.getProperty(label); } public int getHoldability() throws SQLException { checkNotClosed(); // the rationale is there are really no commits in Cassandra so no boundary... return DEFAULT_HOLDABILITY; } public DatabaseMetaData getMetaData() throws SQLException { checkNotClosed(); return new CassandraDatabaseMetaData(this); } public int getTransactionIsolation() throws SQLException { checkNotClosed(); return transactionIsolation; } public SQLWarning getWarnings() throws SQLException { checkNotClosed(); // the rationale is there are no warnings to return in this implementation... return null; } public boolean isClosed() throws SQLException { return isClosed; } public boolean isReadOnly() throws SQLException { checkNotClosed(); return false; } public boolean isValid(int timeout) throws SQLTimeoutException { if (timeout < 0) throw new SQLTimeoutException(BAD_TIMEOUT); // set timeout /* try { if (isClosed()) { return false; } if (isAlive == null) { isAlive = prepareStatement(currentCqlVersion == "2.0.0" ? 
IS_VALID_CQLQUERY_2_0_0 : IS_VALID_CQLQUERY_3_0_0); } // the result is not important isAlive.executeQuery().close(); } catch (SQLException e) { return false; } finally { // reset timeout socket.setTimeout(0); } */ return true; } public boolean isWrapperFor(Class<?> arg0) throws SQLException { return false; } public String nativeSQL(String sql) throws SQLException { checkNotClosed(); // the rationale is there are no distinction between grammars in this implementation... // so we are just return the input argument return sql; } public CassandraPreparedStatement prepareStatement(String cql) throws SQLException { CassandraPreparedStatement prepStmt = preparedStatements.get(cql); if(prepStmt==null){ // Statement didn't exist prepStmt = preparedStatements.putIfAbsent(cql, prepareStatement(cql,DEFAULT_TYPE,DEFAULT_CONCURRENCY,DEFAULT_HOLDABILITY)); if(prepStmt==null){ // Statement has already been created by another thread, so we'll just get it return preparedStatements.get(cql); } } return prepStmt; } public CassandraPreparedStatement prepareStatement(String cql, int rsType) throws SQLException { return prepareStatement(cql,rsType,DEFAULT_CONCURRENCY,DEFAULT_HOLDABILITY); } public CassandraPreparedStatement prepareStatement(String cql, int rsType, int rsConcurrency) throws SQLException { return prepareStatement(cql,rsType,rsConcurrency,DEFAULT_HOLDABILITY); } public CassandraPreparedStatement prepareStatement(String cql, int rsType, int rsConcurrency, int rsHoldability) throws SQLException { checkNotClosed(); CassandraPreparedStatement statement = new CassandraPreparedStatement(this, cql, rsType,rsConcurrency,rsHoldability); statements.add(statement); return statement; } public void rollback() throws SQLException { checkNotClosed(); throw new SQLFeatureNotSupportedException(ALWAYS_AUTOCOMMIT); } public void setAutoCommit(boolean autoCommit) throws SQLException { checkNotClosed(); //if (!autoCommit) throw new SQLFeatureNotSupportedException(ALWAYS_AUTOCOMMIT); } public 
void setCatalog(String arg0) throws SQLException { checkNotClosed(); // the rationale is there are no catalog name to set in this implementation... // so we are "silently ignoring" the request } public void setClientInfo(Properties props) throws SQLClientInfoException { // we don't use them but we will happily collect them for now... if (props != null) clientInfo = props; } public void setClientInfo(String key, String value) throws SQLClientInfoException { // we don't use them but we will happily collect them for now... clientInfo.setProperty(key, value); } public void setHoldability(int arg0) throws SQLException { checkNotClosed(); // the rationale is there are no holdability to set in this implementation... // so we are "silently ignoring" the request } public void setReadOnly(boolean arg0) throws SQLException { checkNotClosed(); // the rationale is all connections are read/write in the Cassandra implementation... // so we are "silently ignoring" the request } public void setTransactionIsolation(int level) throws SQLException { checkNotClosed(); if (level != Connection.TRANSACTION_NONE) throw new SQLFeatureNotSupportedException(NO_TRANSACTIONS); } public <T> T unwrap(Class<T> iface) throws SQLException { throw new SQLFeatureNotSupportedException(String.format(NO_INTERFACE, iface.getSimpleName())); } /** * Remove a Statement from the Open Statements List * @param statement * @return */ protected boolean removeStatement(Statement statement) { return statements.remove(statement); } public String toString() { StringBuilder builder = new StringBuilder(); builder.append("CassandraConnection [connectionProps="); builder.append(connectionProps); builder.append("]"); return builder.toString(); } public Session getSession(){ return this.cSession; } public Metadata getClusterMetadata(){ return metadata; } public Map<String, Class<?>> getTypeMap() throws SQLException { HashMap<String, Class<?>> typeMap = new HashMap<String, Class<?>>(); logger.info("current KS : " + 
currentKeyspace); Collection<UserType> types = this.metadata.getKeyspace(currentKeyspace).getUserTypes(); for(UserType type:types){ typeMap.put(type.getTypeName(), type.getClass()); } return typeMap; } }
package abra;

import java.util.ArrayList;
import java.util.List;

import abra.ReadEvaluator.Alignment;

/**
 * Static helpers for manipulating CIGAR strings: subsetting, padding with
 * matches, splice (intron) injection and equivalence testing of alignments.
 */
public class CigarUtils {

	/**
	 * Extract subset of cigar string based upon input position (index) into cigar and length.
	 * The subset is appended to {@code newCigar}; the return value is the offset of the
	 * subset's start relative to the reference position of the full cigar.
	 */
	public static int subsetCigarString(int pos, int length, String cigar, StringBuffer newCigar) {
		List<CigarBlock> cigarBlocks = getCigarBlocks(cigar);
		List<CigarBlock> newCigarBlocks = new ArrayList<CigarBlock>();
		int relativeRefPos = subsetCigarBlocks(cigarBlocks, pos, length, newCigarBlocks);

		for (CigarBlock block : newCigarBlocks) {
			newCigar.append(block.length);
			newCigar.append(block.type);
		}

		return relativeRefPos;
	}

	// Renders a block list back into cigar-string form.
	private static String cigarStringFromCigarBlocks(List<CigarBlock> blocks) {
		// FIX(idiom): StringBuilder instead of StringBuffer — no synchronization needed here.
		StringBuilder newCigar = new StringBuilder();
		for (CigarBlock block : blocks) {
			newCigar.append(block.length);
			newCigar.append(block.type);
		}
		return newCigar.toString();
	}

	/**
	 * Extends the cigar with {@code leftPad} match bases on the left and
	 * {@code rightPad} on the right, merging into existing edge 'M' blocks.
	 * NOTE(review): a pad of 0 against a non-'M' edge block produces a "0M"
	 * element — confirm callers never pass 0 with a non-match edge.
	 */
	public static String extendCigarWithMatches(String cigar, int leftPad, int rightPad) {
		List<CigarBlock> blocks = getCigarBlocks(cigar);

		if (blocks.get(0).type == 'M') {
			blocks.get(0).length += leftPad;
		} else {
			blocks.add(0, new CigarBlock(leftPad, 'M'));
		}

		int lastBlockIdx = blocks.size() - 1;
		if (blocks.get(lastBlockIdx).type == 'M') {
			blocks.get(lastBlockIdx).length += rightPad;
		} else {
			blocks.add(new CigarBlock(rightPad, 'M'));
		}

		return cigarStringFromCigarBlocks(blocks);
	}

	/**
	 * Injects an 'N' (splice) element of {@code junctionLength} at reference
	 * offset {@code junctionPos}, splitting the covering M/D block if needed.
	 */
	public static String injectSplice(String cigar, int junctionPos, int junctionLength) {
		// Identify pos relative to reference and insert N element
		List<CigarBlock> blocks = getCigarBlocks(cigar);
		List<CigarBlock> newBlocks = new ArrayList<CigarBlock>();

		int refPos = 0;
		for (CigarBlock block : blocks) {
			if (block.type == 'M' || block.type == 'D') {
				if (refPos < junctionPos && refPos + block.length >= junctionPos) {
					// Split up current block into 2 blocks with splice block in between
					int blockLen1 = junctionPos - refPos;
					int blockLen2 = block.length - blockLen1;
					newBlocks.add(new CigarBlock(blockLen1, block.type));
					newBlocks.add(new CigarBlock(junctionLength, 'N'));
					if (blockLen2 > 0) {
						newBlocks.add(new CigarBlock(blockLen2, block.type));
					}
					refPos += block.length;
				} else {
					newBlocks.add(block);
					refPos += block.length;
				}
			} else {
				// Do not advance ref pos for insertions or introns
				newBlocks.add(block);
			}
		}

		return cigarStringFromCigarBlocks(newBlocks);
	}

	// Assumes input junctions are sorted by coordinate
	public static String injectSplices(String cigar, List<Integer> junctionPos, List<Integer> junctionLength) {
		for (int i = 0; i < junctionPos.size(); i++) {
			cigar = injectSplice(cigar, junctionPos.get(i), junctionLength.get(i));
		}
		return cigar;
	}

	// Picks the non-secondary alignment when exactly one of the pair is secondary,
	// otherwise returns the supplied default.
	private static int selectPrimaryAlignment(Alignment alignment1, Alignment alignment2, int def) {
		int selection = def;
		if (!alignment1.isSecondary && alignment2.isSecondary) {
			selection = 1;
		} else if (alignment1.isSecondary && !alignment2.isSecondary) {
			selection = 2;
		}
		return selection;
	}

	// Return 0 if cigars are not equivalent (treating deletions the same as junctions)
	// Return 1 if cigar1 has more junctions
	// Return 2 if cigar2 has more junctions
	public static int testEquivalenceAndSelectIntronPreferred(Alignment alignment1, Alignment alignment2) {
		String cigar1 = alignment1.cigar;
		String cigar2 = alignment2.cigar;

		// Cigars are equal, pick non-secondary or just the first.
		if (cigar1.equals(cigar2)) {
			return (selectPrimaryAlignment(alignment1, alignment2, 1));
		}

		// Cigars are different, pick non-secondary one or neither
		if (cigar1.length() != cigar2.length()) {
			return (selectPrimaryAlignment(alignment1, alignment2, 0));
		}

		int cigar1Introns = 0;
		int cigar2Introns = 0;

		for (int i = 0; i < cigar1.length(); i++) {
			char ch1 = cigar1.charAt(i);
			char ch2 = cigar2.charAt(i);

			if (ch1 != ch2) {
				if ((ch1 != 'N' && ch1 != 'D') || (ch2 != 'N' && ch2 != 'D')) {
					// Non-equivalent cigars
					return (selectPrimaryAlignment(alignment1, alignment2, 0));
				} else {
					if (ch1 == 'N') {
						cigar1Introns += 1;
					}
					if (ch2 == 'N') {
						cigar2Introns += 1;
					}
				}
			}
		}

		if (cigar1Introns != cigar2Introns) {
			return cigar1Introns >= cigar2Introns ? 1 : 2;
		}

		// Equivalent cigars.  Pick non-secondary or just the first.
		return (selectPrimaryAlignment(alignment1, alignment2, 0));
	}

	/**
	 * Returns true if cigar contains a deletion bracketed by introns
	 * (an N,D,N run of adjacent elements).
	 */
	public static boolean hasNDN(String cigar) {
		// FIX(naming): local renamed from the misleading 'hasNDM'.
		boolean hasNdn = false;

		List<CigarBlock> blocks = getCigarBlocks(cigar);

		for (int i = 2; i < blocks.size(); i++) {
			if ((blocks.get(i).type == 'N') && (blocks.get(i - 1).type == 'D') && (blocks.get(i - 2).type == 'N')) {
				hasNdn = true;
				break;
			}
		}

		return hasNdn;
	}

	/**
	 * Returns true if the input cigar string begins or ends with 2 adjacent indels.
	 * Clipping is ignored.
	 */
	public static boolean startsOrEndsWithComplexIndel(String cigar) {
		boolean ret = false;
		List<CigarBlock> blocks = getUnclippedCigarBlocks(cigar);
		if (blocks.size() > 1) {
			if (blocks.get(0).isIndel() && blocks.get(1).isIndel()) {
				ret = true;
			} else if (blocks.get(blocks.size() - 1).isIndel() && blocks.get(blocks.size() - 2).isIndel()) {
				ret = true;
			}
		}
		return ret;
	}

	// Parses a cigar string into blocks, dropping hard ('H') and soft ('S') clips.
	private static List<CigarBlock> getUnclippedCigarBlocks(String cigar) {
		List<CigarBlock> cigarBlocks = new ArrayList<CigarBlock>();

		try {
			StringBuilder len = new StringBuilder();
			for (int i = 0; i < cigar.length(); i++) {
				char ch = cigar.charAt(i);
				if (Character.isDigit(ch)) {
					len.append(ch);
				} else {
					if (ch != 'H' && ch != 'S') {
						// FIX(idiom): parseInt avoids needless Integer boxing.
						cigarBlocks.add(new CigarBlock(Integer.parseInt(len.toString()), ch));
					}
					len.setLength(0);
				}
			}
		} catch (NumberFormatException e) {
			Logger.error("NumberFormatException: " + cigar);
			throw e;
		}

		return cigarBlocks;
	}

	// Parses a cigar string into (length, type) blocks.
	private static List<CigarBlock> getCigarBlocks(String cigar) {
		List<CigarBlock> cigarBlocks = new ArrayList<CigarBlock>();

		try {
			StringBuilder len = new StringBuilder();
			for (int i = 0; i < cigar.length(); i++) {
				char ch = cigar.charAt(i);
				if (Character.isDigit(ch)) {
					len.append(ch);
				} else {
					cigarBlocks.add(new CigarBlock(Integer.parseInt(len.toString()), ch));
					len.setLength(0);
				}
			}
		} catch (NumberFormatException e) {
			Logger.error("NumberFormatException: " + cigar);
			throw e;
		}

		return cigarBlocks;
	}

	/**
	 * Copies the portion of {@code contigBlocks} starting at contig offset
	 * {@code pos} covering {@code readLength} read bases into
	 * {@code readBlocks}; returns the reference offset of the subset start.
	 * Never starts the subset inside a deletion/intron block.
	 */
	private static int subsetCigarBlocks(List<CigarBlock> contigBlocks, int pos, int readLength, List<CigarBlock> readBlocks) {
		int currLen = 0;
		int contigPos = 0;
		int relativeRefPos = 0;
		boolean isReadPosReached = false;

		for (CigarBlock block : contigBlocks) {
			int blockLength = block.length;

			// Identify the start point for subsetting
			if (!isReadPosReached) {
				if (!block.isGap()) { // Never start in a deletion
					if (contigPos + block.length >= pos) {
						blockLength = contigPos + block.length - pos;
						isReadPosReached = true;
						if (block.type != 'I') {
							// Include partial block length for matches
							relativeRefPos += block.length - blockLength;
						}
					} else {
						contigPos += block.length;
						if (block.type != 'I') {
							// Include entire block for matches
							relativeRefPos += block.length;
						}
					}
				} else {
					// Include entire block for deletes
					relativeRefPos += block.length;
				}
			}

			if (isReadPosReached && blockLength > 0) {
				if (block.isGap()) { // Never start in a deletion
					if (!readBlocks.isEmpty()) {
						readBlocks.add(block);
					} else {
						// skip over leading deletion in reference position
						relativeRefPos += block.length;
					}
				} else if (blockLength < readLength - currLen) {
					currLen += blockLength;
					readBlocks.add(new CigarBlock(blockLength, block.type));
				} else {
					int len = readLength - currLen;
					currLen += len;
					readBlocks.add(new CigarBlock(len, block.type));
					break;
				}
			}
		}

		return relativeRefPos;
	}

	// One cigar element: a length and an operation type character.
	static class CigarBlock {
		int length;
		char type;

		CigarBlock(int length, char type) {
			this.length = length;
			this.type = type;
		}

		// Gap relative to the read: deletion or intron.
		boolean isGap() {
			return type == 'D' || type == 'N';
		}

		// Indel: deletion or insertion.
		boolean isIndel() {
			return type == 'D' || type == 'I';
		}
	}
}
/*
 * Copyright 2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hs.mail.web.controller;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.multiaction.MultiActionController;
import org.springframework.web.servlet.view.RedirectView;

import com.hs.mail.container.config.Config;
import com.hs.mail.imap.user.Alias;
import com.hs.mail.imap.user.User;
import com.hs.mail.imap.user.UserManager;
import com.hs.mail.security.login.BasicCallbackHandler;
import com.hs.mail.web.WebSession;
import com.hs.mail.web.exception.KeyedException;
import com.hs.mail.web.exception.SessionRequiredException;
import com.hs.mail.web.util.DataImporter;
import com.hs.mail.web.util.Pager;
import com.hs.mail.web.util.RequestUtils;

/**
 * Web administration console controller: dispatches session, account and
 * alias management requests based on the "todo" request parameter.
 *
 * @author Won Chul Doh
 * @since Sep 1, 2010
 *
 */
public class WebConsole extends MultiActionController {

    // logging
    private final Logger logger = Logger.getLogger(getClass());

    // User management service, injected via setUserManager().
    private UserManager manager;

    public void setUserManager(UserManager userManager) {
        this.manager = userManager;
    }

    // Logs the incoming request before delegating to the standard
    // MultiActionController dispatch.
    public ModelAndView handleRequestInternal(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        RequestUtils.debug(request);
        return super.handleRequestInternal(request, response);
    }

    /**
     * Dispatchers ************************************************
     */

    // Entry point for session actions (login/logout); no valid session required.
    public ModelAndView session(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        WebSession session = new WebSession(request, response);
        String dome = request.getParameter("todo");
        return doDispatchSessionActions(session, request, response, dome);
    }

    // Entry point for account actions; requires an authenticated session.
    public ModelAndView account(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        WebSession session = new WebSession(request, response);
        if (!session.isValid()) {
            throw new SessionRequiredException("session.required");
        }
        String dome = request.getParameter("todo");
        return doDispatchAccountActions(session, request, response, dome);
    }

    // Entry point for alias actions; requires an authenticated session.
    public ModelAndView alias(HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        WebSession session = new WebSession(request, response);
        if (!session.isValid()) {
            throw new SessionRequiredException("session.required");
        }
        String dome = request.getParameter("todo");
        return doDispatchAliasActions(session, request, response, dome);
    }

    /**
     * End Dispatchers ************************************************
     */

    /**
     * Session Actions ************************************************
     */

    /**
     * Dispatches actions targeting a <tt>session</tt>.
     *
     * @param session
     *            a <tt>WebSession</tt> instance
     * @param request
     *            a reference to the actual <tt>HttpServletRequest</tt> instance
     * @param response
     *            a reference to the actual <tt>HttpServletResponse</tt>
     *            instance
     * @param dome
     *            the task as <tt>String</tt>
     * @throws Exception
     *             if it fails to dispatch the request to a method (i.e. invalid
     *             request), or the action method fails to execute the task.
     *
     */
    private ModelAndView doDispatchSessionActions(WebSession session,
            HttpServletRequest request, HttpServletResponse response,
            String dome) throws Exception {
        if ("login".equals(dome)) {
            String username = request.getParameter("username");
            String password = request.getParameter("password");
            String facility = request.getParameter("facility");
            return doLogin(session, username, password, facility);
        } else if ("logout".equals(dome)) {
            return doLogout(session, request);
        } else {
            throw new KeyedException("parameter.todo.invalid");
        }
    }

    // Authenticates against the given JAAS facility; on success stores the
    // LoginContext in the session and renders the console with known domains.
    private ModelAndView doLogin(WebSession session, String username,
            String password, String facility) {
        try {
            CallbackHandler callbackHandler = new BasicCallbackHandler(
                    username, password.toCharArray());
            LoginContext lc = new LoginContext(facility, callbackHandler);
            lc.login();
            session.storeBean(WebSession.LOGIN_CONTEXT, lc);
            List<String> domains = Arrays.asList(Config.getDomains());
            ModelAndView mav = new ModelAndView("console");
            mav.addObject("domains", domains);
            return mav;
        } catch (LoginException e) {
            logger.error(e.getMessage(), e);
            return new ModelAndView("index", "error", "incorrect.password");
        }
    }

    // Logs the JAAS context out and removes it from the session; failures are
    // logged but still lead back to the index view.
    private ModelAndView doLogout(WebSession session, HttpServletRequest request) {
        try {
            LoginContext lc = (LoginContext) session
                    .retrieveBean(WebSession.LOGIN_CONTEXT);
            lc.logout();
            session.removeBean(WebSession.LOGIN_CONTEXT);
        } catch (LoginException e) {
            logger.error(e.getMessage(), e);
        }
        return new ModelAndView("index");
    }

    /*** End Session Actions ************************************************ */

    /**
     * Account Actions ************************************************
     */

    // Dispatches account management actions keyed by the "todo" parameter:
    // display / delete / empty / import / doimport.
    private ModelAndView doDispatchAccountActions(WebSession session,
            HttpServletRequest request, HttpServletResponse response,
            String dome) throws Exception {
        if ("display".equals(dome)) {
            String domain = RequestUtils.getParameter(request, "domain");
            if (domain == null) {
                throw new KeyedException("parameter.missing.domain");
            }
            int page = RequestUtils.getParameterInt(request, "page", 1);
            int pageSize = RequestUtils.getParameterInt(request, "pageSize", 12);
            return doDisplayAccounts(session, domain, page, pageSize);
        } else if ("delete".equals(dome)) {
            String domain = RequestUtils.getParameter(request, "domain");
            if (domain == null) {
                throw new KeyedException("parameter.missing.domain");
            }
            // Accept either the multi-valued "IDs" or single "ID" parameter.
            long[] idarray = RequestUtils.getParameterLongs(request, "IDs");
            if (idarray == null) {
                idarray = RequestUtils.getParameterLongs(request, "ID");
                if (idarray == null) {
                    throw new KeyedException("parameter.account.missinguid");
                }
            }
            return doDeleteAccounts(session, domain, idarray);
        } else if ("empty".equals(dome)) {
            long id = RequestUtils.getParameterLong(request, "ID", 0);
            if (id == 0) {
                throw new KeyedException("parameter.account.missinguid");
            }
            return doEmptyAccount(session, id);
        } else if ("doimport".equals(dome)) {
            return doImportAccounts(session);
        } else if ("import".equals(dome)) {
            return doDisplayImportAccounts();
        } else {
            throw new KeyedException("parameter.todo.invalid");
        }
    }

    // Renders one page of accounts for a domain; the per-domain count is
    // cached in the session to avoid recounting on every page view.
    private ModelAndView doDisplayAccounts(WebSession session, String domain,
            int page, int pageSize) {
        Integer count = (Integer) session.retrieveBean(domain
                + WebSession.ACCOUNT_COUNT);
        if (count == null) {
            count = manager.getUserCount(domain);
            session.storeBean(domain + WebSession.ACCOUNT_COUNT, count);
        }
        Pager pager = new Pager(page, pageSize, count, true);
        List<User> users = null;
        if (count > 0) {
            users = manager.getUserList(domain, page, pageSize);
        }
        ModelAndView mav = new ModelAndView("accountlist");
        mav.addObject("session", session);
        mav.addObject("users", users);
        mav.addObject("pager", pager);
        return mav;
    }

    // Deletes the given account ids; per-account failures are logged and the
    // loop continues. Invalidates the cached account count for the domain.
    private ModelAndView doDeleteAccounts(WebSession session, String domain,
            long[] idarray) {
        session.removeBean(domain + WebSession.ACCOUNT_COUNT);
        for (int i = 0; i < idarray.length; i++) {
            try {
                manager.deleteUser(idarray[i]);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
        return getRedirectView(session);
    }

    private
ModelAndView doEmptyAccount(WebSession session, long id) { manager.emptyUser(id); return getRedirectView(session); } private ModelAndView doDisplayImportAccounts() { return new ModelAndView("import"); } private ModelAndView doImportAccounts(WebSession session) { MultipartHttpServletRequest multi = (MultipartHttpServletRequest) session .getRequest(); DataImporter importer = new DataImporter(); MultipartFile mf = multi.getFile("file"); if (mf != null) { try { session.removeBeans(WebSession.ACCOUNT_COUNT); importer.importAccount(manager, mf.getInputStream()); } catch (IOException e) { importer.addError(0, mf.getOriginalFilename(), e); } } if (importer.hasErrors()) { return new ModelAndView("importerror", "errors", importer .getErrors()); } else { return getRedirectView(session); } } /*** End Account Actions ************************************************ */ /*** Alias Actions ************************************************ */ private ModelAndView doDispatchAliasActions(WebSession session, HttpServletRequest request, HttpServletResponse response, String dome) throws Exception { if ("display".equals(dome)) { String domain = RequestUtils.getParameter(request, "domain"); if (domain == null) { throw new KeyedException("parameter.missing.domain"); } int page = RequestUtils.getParameterInt(request, "page", 1); int pageSize = RequestUtils.getParameterInt(request, "pageSize", 12); return doDisplayAliases(session, domain, page, pageSize); } else if ("delete".equals(dome)) { String domain = RequestUtils.getParameter(request, "domain"); if (domain == null) { throw new KeyedException("parameter.missing.domain"); } long[] idarray = RequestUtils.getParameterLongs(request, "IDs"); if (idarray == null) { idarray = RequestUtils.getParameterLongs(request, "ID"); if (idarray == null) { throw new KeyedException("parameter.alias.missinguid"); } } return doDeleteAliases(session, domain, idarray); } else { throw new KeyedException("parameter.todo.invalid"); } } private ModelAndView 
doDisplayAliases(WebSession session, String domain, int page, int pageSize) { Integer count = (Integer) session.retrieveBean(domain + WebSession.ALIAS_COUNT); if (count == null) { count = manager.getAliasCount(domain); session.storeBean(domain + WebSession.ALIAS_COUNT, count); } Pager pager = new Pager(page, pageSize, count, true); List<Alias> aliases = null; if (count > 0) { aliases = manager.getAliasList(domain, page, pageSize); } ModelAndView mav = new ModelAndView("aliaslist"); mav.addObject("session", session); mav.addObject("aliases", aliases); mav.addObject("pager", pager); return mav; } private ModelAndView doDeleteAliases(WebSession session, String domain, long[] uids) { session.removeBean(domain + WebSession.ALIAS_COUNT); for (int i = 0; i < uids.length; i++) { try { manager.deleteAlias(uids[i]); } catch (Exception e) { logger.error(e.getMessage(), e); } } return getRedirectView(session); } /*** End Alias Actions ************************************************ */ /*** Helper methods ****************************************************/ private ModelAndView getRedirectView(WebSession session) { return new ModelAndView(new RedirectView(RequestUtils.getParameter( session.getRequest(), "returl"))); } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simpleemail.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Represents sending statistics data. Each <code>SendDataPoint</code> contains statistics for a 15-minute period of * sending activity. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/email-2010-12-01/SendDataPoint" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SendDataPoint implements Serializable, Cloneable { /** * <p> * Time of the data point. * </p> */ private java.util.Date timestamp; /** * <p> * Number of emails that have been sent. * </p> */ private Long deliveryAttempts; /** * <p> * Number of emails that have bounced. * </p> */ private Long bounces; /** * <p> * Number of unwanted emails that were rejected by recipients. * </p> */ private Long complaints; /** * <p> * Number of emails rejected by Amazon SES. * </p> */ private Long rejects; /** * <p> * Time of the data point. * </p> * * @param timestamp * Time of the data point. */ public void setTimestamp(java.util.Date timestamp) { this.timestamp = timestamp; } /** * <p> * Time of the data point. * </p> * * @return Time of the data point. */ public java.util.Date getTimestamp() { return this.timestamp; } /** * <p> * Time of the data point. * </p> * * @param timestamp * Time of the data point. 
* @return Returns a reference to this object so that method calls can be chained together. */ public SendDataPoint withTimestamp(java.util.Date timestamp) { setTimestamp(timestamp); return this; } /** * <p> * Number of emails that have been sent. * </p> * * @param deliveryAttempts * Number of emails that have been sent. */ public void setDeliveryAttempts(Long deliveryAttempts) { this.deliveryAttempts = deliveryAttempts; } /** * <p> * Number of emails that have been sent. * </p> * * @return Number of emails that have been sent. */ public Long getDeliveryAttempts() { return this.deliveryAttempts; } /** * <p> * Number of emails that have been sent. * </p> * * @param deliveryAttempts * Number of emails that have been sent. * @return Returns a reference to this object so that method calls can be chained together. */ public SendDataPoint withDeliveryAttempts(Long deliveryAttempts) { setDeliveryAttempts(deliveryAttempts); return this; } /** * <p> * Number of emails that have bounced. * </p> * * @param bounces * Number of emails that have bounced. */ public void setBounces(Long bounces) { this.bounces = bounces; } /** * <p> * Number of emails that have bounced. * </p> * * @return Number of emails that have bounced. */ public Long getBounces() { return this.bounces; } /** * <p> * Number of emails that have bounced. * </p> * * @param bounces * Number of emails that have bounced. * @return Returns a reference to this object so that method calls can be chained together. */ public SendDataPoint withBounces(Long bounces) { setBounces(bounces); return this; } /** * <p> * Number of unwanted emails that were rejected by recipients. * </p> * * @param complaints * Number of unwanted emails that were rejected by recipients. */ public void setComplaints(Long complaints) { this.complaints = complaints; } /** * <p> * Number of unwanted emails that were rejected by recipients. * </p> * * @return Number of unwanted emails that were rejected by recipients. 
*/ public Long getComplaints() { return this.complaints; } /** * <p> * Number of unwanted emails that were rejected by recipients. * </p> * * @param complaints * Number of unwanted emails that were rejected by recipients. * @return Returns a reference to this object so that method calls can be chained together. */ public SendDataPoint withComplaints(Long complaints) { setComplaints(complaints); return this; } /** * <p> * Number of emails rejected by Amazon SES. * </p> * * @param rejects * Number of emails rejected by Amazon SES. */ public void setRejects(Long rejects) { this.rejects = rejects; } /** * <p> * Number of emails rejected by Amazon SES. * </p> * * @return Number of emails rejected by Amazon SES. */ public Long getRejects() { return this.rejects; } /** * <p> * Number of emails rejected by Amazon SES. * </p> * * @param rejects * Number of emails rejected by Amazon SES. * @return Returns a reference to this object so that method calls can be chained together. */ public SendDataPoint withRejects(Long rejects) { setRejects(rejects); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getTimestamp() != null) sb.append("Timestamp: ").append(getTimestamp()).append(","); if (getDeliveryAttempts() != null) sb.append("DeliveryAttempts: ").append(getDeliveryAttempts()).append(","); if (getBounces() != null) sb.append("Bounces: ").append(getBounces()).append(","); if (getComplaints() != null) sb.append("Complaints: ").append(getComplaints()).append(","); if (getRejects() != null) sb.append("Rejects: ").append(getRejects()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof SendDataPoint == false) return false; SendDataPoint other = (SendDataPoint) obj; if (other.getTimestamp() == null ^ this.getTimestamp() == null) return false; if (other.getTimestamp() != null && other.getTimestamp().equals(this.getTimestamp()) == false) return false; if (other.getDeliveryAttempts() == null ^ this.getDeliveryAttempts() == null) return false; if (other.getDeliveryAttempts() != null && other.getDeliveryAttempts().equals(this.getDeliveryAttempts()) == false) return false; if (other.getBounces() == null ^ this.getBounces() == null) return false; if (other.getBounces() != null && other.getBounces().equals(this.getBounces()) == false) return false; if (other.getComplaints() == null ^ this.getComplaints() == null) return false; if (other.getComplaints() != null && other.getComplaints().equals(this.getComplaints()) == false) return false; if (other.getRejects() == null ^ this.getRejects() == null) return false; if (other.getRejects() != null && other.getRejects().equals(this.getRejects()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getTimestamp() == null) ? 
0 : getTimestamp().hashCode()); hashCode = prime * hashCode + ((getDeliveryAttempts() == null) ? 0 : getDeliveryAttempts().hashCode()); hashCode = prime * hashCode + ((getBounces() == null) ? 0 : getBounces().hashCode()); hashCode = prime * hashCode + ((getComplaints() == null) ? 0 : getComplaints().hashCode()); hashCode = prime * hashCode + ((getRejects() == null) ? 0 : getRejects().hashCode()); return hashCode; } @Override public SendDataPoint clone() { try { return (SendDataPoint) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.types.extraction; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.FunctionHint; import org.apache.flink.table.annotation.InputGroup; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.data.RowData; import org.apache.flink.table.functions.AggregateFunction; import org.apache.flink.table.functions.ScalarFunction; import org.apache.flink.table.functions.TableAggregateFunction; import org.apache.flink.table.functions.TableFunction; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.inference.ArgumentTypeStrategy; import org.apache.flink.table.types.inference.InputTypeStrategies; import org.apache.flink.table.types.inference.InputTypeStrategy; import org.apache.flink.table.types.inference.TypeInference; import org.apache.flink.table.types.inference.TypeStrategies; import org.apache.flink.table.types.inference.TypeStrategy; import org.apache.flink.table.types.utils.DataTypeFactoryMock; import org.apache.flink.types.Row; import org.hamcrest.Matcher; import org.junit.Rule; import org.junit.Test; import 
org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; import javax.annotation.Nullable; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Supplier; import static org.apache.flink.core.testutils.FlinkMatchers.containsCause; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; /** Tests for {@link TypeInferenceExtractor}. */ @RunWith(Parameterized.class) @SuppressWarnings("unused") public class TypeInferenceExtractorTest { @Parameters(name = "{index}: {0}") public static List<TestSpec> testData() { return Arrays.asList( // function hint defines everything TestSpec.forScalarFunction(FullFunctionHint.class) .expectNamedArguments("i", "s") .expectTypedArguments(DataTypes.INT(), DataTypes.STRING()) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i", "s"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.INT()), InputTypeStrategies.explicit(DataTypes.STRING()) }), TypeStrategies.explicit(DataTypes.BOOLEAN())), // function hint defines everything with overloading TestSpec.forScalarFunction(FullFunctionHints.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.BIGINT())), // global output hint with local input overloading TestSpec.forScalarFunction(GlobalOutputFunctionHint.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( 
InputTypeStrategies.explicit(DataTypes.STRING())), TypeStrategies.explicit(DataTypes.INT())), // unsupported output overloading TestSpec.forScalarFunction(InvalidSingleOutputFunctionHint.class) .expectErrorMessage( "Function hints that lead to ambiguous results are not allowed."), // global and local overloading TestSpec.forScalarFunction(SplitFullFunctionHints.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.BIGINT())), // global and local overloading with unsupported output overloading TestSpec.forScalarFunction(InvalidFullOutputFunctionHint.class) .expectErrorMessage( "Function hints with same input definition but different result types are not allowed."), // ignore argument names during overloading TestSpec.forScalarFunction(InvalidFullOutputFunctionWithArgNamesHint.class) .expectErrorMessage( "Function hints with same input definition but different result types are not allowed."), // invalid data type hint TestSpec.forScalarFunction(IncompleteFunctionHint.class) .expectErrorMessage( "Data type hint does neither specify a data type nor input group for use as function argument."), // varargs and ANY input group TestSpec.forScalarFunction(ComplexFunctionHint.class) .expectOutputMapping( InputTypeStrategies.varyingSequence( new String[] {"myInt", "myAny"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.ARRAY(DataTypes.INT())), InputTypeStrategies.ANY }), TypeStrategies.explicit(DataTypes.BOOLEAN())), // global input hints and local output hints TestSpec.forScalarFunction(GlobalInputFunctionHints.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( 
InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.INT())), // no arguments TestSpec.forScalarFunction(ZeroArgFunction.class) .expectNamedArguments() .expectTypedArguments() .expectOutputMapping( InputTypeStrategies.sequence( new String[0], new ArgumentTypeStrategy[0]), TypeStrategies.explicit(DataTypes.INT())), // test primitive arguments extraction TestSpec.forScalarFunction(MixedArgFunction.class) .expectNamedArguments("i", "d") .expectTypedArguments( DataTypes.INT().notNull().bridgedTo(int.class), DataTypes.DOUBLE()) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i", "d"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.INT().notNull().bridgedTo(int.class)), InputTypeStrategies.explicit(DataTypes.DOUBLE()) }), TypeStrategies.explicit(DataTypes.INT())), // test overloaded arguments extraction TestSpec.forScalarFunction(OverloadedFunction.class) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i", "d"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.INT().notNull().bridgedTo(int.class)), InputTypeStrategies.explicit(DataTypes.DOUBLE()) }), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"s"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.STRING()) }), TypeStrategies.explicit( DataTypes.BIGINT().notNull().bridgedTo(long.class))), // test varying arguments extraction TestSpec.forScalarFunction(VarArgFunction.class) .expectOutputMapping( InputTypeStrategies.varyingSequence( new String[] {"i", "more"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.INT().notNull().bridgedTo(int.class)), InputTypeStrategies.explicit( DataTypes.INT().notNull().bridgedTo(int.class)) }), TypeStrategies.explicit(DataTypes.STRING())), // test varying arguments extraction with byte TestSpec.forScalarFunction(VarArgWithByteFunction.class) .expectOutputMapping( 
InputTypeStrategies.varyingSequence( new String[] {"bytes"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.TINYINT() .notNull() .bridgedTo(byte.class)) }), TypeStrategies.explicit(DataTypes.STRING())), // output hint with input extraction TestSpec.forScalarFunction(ExtractWithOutputHintFunction.class) .expectNamedArguments("i") .expectTypedArguments(DataTypes.INT()) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.INT()) }), TypeStrategies.explicit(DataTypes.INT())), // output extraction with input hints TestSpec.forScalarFunction(ExtractWithInputHintFunction.class) .expectNamedArguments("i", "b") .expectTypedArguments(DataTypes.INT(), DataTypes.BOOLEAN()) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i", "b"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.INT()), InputTypeStrategies.explicit(DataTypes.BOOLEAN()) }), TypeStrategies.explicit( DataTypes.DOUBLE().notNull().bridgedTo(double.class))), // different accumulator depending on input TestSpec.forAggregateFunction(InputDependentAccumulatorFunction.class) .expectAccumulatorMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("f", DataTypes.BIGINT())))) .expectAccumulatorMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.STRING())), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("f", DataTypes.STRING())))) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.STRING())) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.STRING())), TypeStrategies.explicit(DataTypes.STRING())), // input, accumulator, and output are spread across the function TestSpec.forAggregateFunction(AggregateFunctionWithManyAnnotations.class) 
.expectNamedArguments("r") .expectTypedArguments( DataTypes.ROW( DataTypes.FIELD("i", DataTypes.INT()), DataTypes.FIELD("b", DataTypes.BOOLEAN()))) .expectAccumulatorMapping( InputTypeStrategies.sequence( new String[] {"r"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.ROW( DataTypes.FIELD("i", DataTypes.INT()), DataTypes.FIELD( "b", DataTypes.BOOLEAN()))) }), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("b", DataTypes.BOOLEAN())))) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"r"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.ROW( DataTypes.FIELD("i", DataTypes.INT()), DataTypes.FIELD( "b", DataTypes.BOOLEAN()))) }), TypeStrategies.explicit(DataTypes.STRING())), // test for table functions TestSpec.forTableFunction(OutputHintTableFunction.class) .expectNamedArguments("i") .expectTypedArguments(DataTypes.INT().notNull().bridgedTo(int.class)) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit( DataTypes.INT().notNull().bridgedTo(int.class)) }), TypeStrategies.explicit( DataTypes.ROW( DataTypes.FIELD("i", DataTypes.INT()), DataTypes.FIELD("b", DataTypes.BOOLEAN())))), // mismatch between hints and implementation regarding return type TestSpec.forScalarFunction(InvalidMethodScalarFunction.class) .expectErrorMessage( "Considering all hints, the method should comply with the signature:\n" + "java.lang.String eval(int[])"), // mismatch between hints and implementation regarding accumulator TestSpec.forAggregateFunction(InvalidMethodAggregateFunction.class) .expectErrorMessage( "Considering all hints, the method should comply with the signature:\n" + "accumulate(java.lang.Integer, int, boolean)"), // no implementation TestSpec.forTableFunction(MissingMethodTableFunction.class) .expectErrorMessage( "Could not find a publicly accessible method named 'eval'."), // named arguments with overloaded function 
TestSpec.forScalarFunction(NamedArgumentsScalarFunction.class) .expectNamedArguments("n"), // scalar function that takes any input TestSpec.forScalarFunction(InputGroupScalarFunction.class) .expectNamedArguments("o") .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"o"}, new ArgumentTypeStrategy[] {InputTypeStrategies.ANY}), TypeStrategies.explicit(DataTypes.STRING())), // scalar function that takes any input as vararg TestSpec.forScalarFunction(VarArgInputGroupScalarFunction.class) .expectOutputMapping( InputTypeStrategies.varyingSequence( new String[] {"o"}, new ArgumentTypeStrategy[] {InputTypeStrategies.ANY}), TypeStrategies.explicit(DataTypes.STRING())), TestSpec.forScalarFunction( "Scalar function with implicit overloading order", OrderedScalarFunction.class) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.INT()) }), TypeStrategies.explicit(DataTypes.INT())) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"l"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.BIGINT()) }), TypeStrategies.explicit(DataTypes.BIGINT())), TestSpec.forScalarFunction( "Scalar function with explicit overloading order by class annotations", OrderedScalarFunction2.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.BIGINT())) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), TypeStrategies.explicit(DataTypes.INT())), TestSpec.forScalarFunction( "Scalar function with explicit overloading order by method annotations", OrderedScalarFunction3.class) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.BIGINT())), TypeStrategies.explicit(DataTypes.BIGINT())) .expectOutputMapping( InputTypeStrategies.sequence( InputTypeStrategies.explicit(DataTypes.INT())), 
TypeStrategies.explicit(DataTypes.INT())), TestSpec.forTableFunction( "A data type hint on the class is used instead of a function output hint", DataTypeHintOnTableFunctionClass.class) .expectNamedArguments() .expectTypedArguments() .expectOutputMapping( InputTypeStrategies.sequence( new String[] {}, new ArgumentTypeStrategy[] {}), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("i", DataTypes.INT())))), TestSpec.forTableFunction( "A data type hint on the method is used instead of a function output hint", DataTypeHintOnTableFunctionMethod.class) .expectNamedArguments("i") .expectTypedArguments(DataTypes.INT()) .expectOutputMapping( InputTypeStrategies.sequence( new String[] {"i"}, new ArgumentTypeStrategy[] { InputTypeStrategies.explicit(DataTypes.INT()) }), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("i", DataTypes.INT())))), TestSpec.forTableFunction( "Invalid data type hint on top of method and class", InvalidDataTypeHintOnTableFunction.class) .expectErrorMessage( "More than one data type hint found for output of function. 
" + "Please use a function hint instead."), TestSpec.forScalarFunction( "A data type hint on the method is used for enriching (not a function output hint)", DataTypeHintOnScalarFunction.class) .expectNamedArguments() .expectTypedArguments() .expectOutputMapping( InputTypeStrategies.sequence( new String[] {}, new ArgumentTypeStrategy[] {}), TypeStrategies.explicit( DataTypes.ROW(DataTypes.FIELD("i", DataTypes.INT())) .bridgedTo(RowData.class)))); } @Parameter public TestSpec testSpec; @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void testArgumentNames() { if (testSpec.expectedArgumentNames != null) { assertThat( testSpec.typeInferenceExtraction.get().getNamedArguments(), equalTo(Optional.of(testSpec.expectedArgumentNames))); } else if (testSpec.expectedErrorMessage == null) { assertThat( testSpec.typeInferenceExtraction.get().getNamedArguments(), equalTo(Optional.empty())); } } @Test public void testArgumentTypes() { if (testSpec.expectedArgumentTypes != null) { assertThat( testSpec.typeInferenceExtraction.get().getTypedArguments(), equalTo(Optional.of(testSpec.expectedArgumentTypes))); } else if (testSpec.expectedErrorMessage == null) { assertThat( testSpec.typeInferenceExtraction.get().getTypedArguments(), equalTo(Optional.empty())); } } @Test public void testInputTypeStrategy() { if (!testSpec.expectedOutputStrategies.isEmpty()) { assertThat( testSpec.typeInferenceExtraction.get().getInputTypeStrategy(), equalTo( testSpec.expectedOutputStrategies.keySet().stream() .reduce(InputTypeStrategies::or) .orElseThrow(AssertionError::new))); } } @Test public void testAccumulatorTypeStrategy() { if (!testSpec.expectedAccumulatorStrategies.isEmpty()) { assertThat( testSpec.typeInferenceExtraction.get().getAccumulatorTypeStrategy().isPresent(), equalTo(true)); assertThat( testSpec.typeInferenceExtraction.get().getAccumulatorTypeStrategy().get(), equalTo(TypeStrategies.mapping(testSpec.expectedAccumulatorStrategies))); } } @Test public void 
testOutputTypeStrategy() { if (!testSpec.expectedOutputStrategies.isEmpty()) { assertThat( testSpec.typeInferenceExtraction.get().getOutputTypeStrategy(), equalTo(TypeStrategies.mapping(testSpec.expectedOutputStrategies))); } } @Test public void testErrorMessage() { if (testSpec.expectedErrorMessage != null) { thrown.expect(ValidationException.class); thrown.expectCause(errorMatcher(testSpec)); testSpec.typeInferenceExtraction.get(); } } // -------------------------------------------------------------------------------------------- // Test utilities // -------------------------------------------------------------------------------------------- /** Test specification shared with the Scala tests. */ static class TestSpec { private final String description; final Supplier<TypeInference> typeInferenceExtraction; @Nullable List<String> expectedArgumentNames; @Nullable List<DataType> expectedArgumentTypes; Map<InputTypeStrategy, TypeStrategy> expectedAccumulatorStrategies; Map<InputTypeStrategy, TypeStrategy> expectedOutputStrategies; @Nullable String expectedErrorMessage; private TestSpec(String description, Supplier<TypeInference> typeInferenceExtraction) { this.description = description; this.typeInferenceExtraction = typeInferenceExtraction; this.expectedAccumulatorStrategies = new LinkedHashMap<>(); this.expectedOutputStrategies = new LinkedHashMap<>(); } static TestSpec forScalarFunction(Class<? extends ScalarFunction> function) { return forScalarFunction(null, function); } static TestSpec forScalarFunction( String description, Class<? extends ScalarFunction> function) { return new TestSpec( description == null ? function.getSimpleName() : description, () -> TypeInferenceExtractor.forScalarFunction( new DataTypeFactoryMock(), function)); } static TestSpec forAggregateFunction(Class<? extends AggregateFunction<?, ?>> function) { return forAggregateFunction(null, function); } static TestSpec forAggregateFunction( String description, Class<? 
extends AggregateFunction<?, ?>> function) {
            return new TestSpec(
                    description == null ? function.getSimpleName() : description,
                    () ->
                            TypeInferenceExtractor.forAggregateFunction(
                                    new DataTypeFactoryMock(), function));
        }

        static TestSpec forTableFunction(Class<? extends TableFunction<?>> function) {
            return forTableFunction(null, function);
        }

        static TestSpec forTableFunction(
                String description, Class<? extends TableFunction<?>> function) {
            return new TestSpec(
                    description == null ? function.getSimpleName() : description,
                    () ->
                            TypeInferenceExtractor.forTableFunction(
                                    new DataTypeFactoryMock(), function));
        }

        static TestSpec forTableAggregateFunction(
                Class<? extends TableAggregateFunction<?, ?>> function) {
            return forTableAggregateFunction(null, function);
        }

        static TestSpec forTableAggregateFunction(
                String description, Class<? extends TableAggregateFunction<?, ?>> function) {
            return new TestSpec(
                    description == null ? function.getSimpleName() : description,
                    () ->
                            TypeInferenceExtractor.forTableAggregateFunction(
                                    new DataTypeFactoryMock(), function));
        }

        // Builder-style expectation setters; each returns this for chaining.

        TestSpec expectNamedArguments(String... expectedArgumentNames) {
            this.expectedArgumentNames = Arrays.asList(expectedArgumentNames);
            return this;
        }

        TestSpec expectTypedArguments(DataType... expectedArgumentTypes) {
            this.expectedArgumentTypes = Arrays.asList(expectedArgumentTypes);
            return this;
        }

        TestSpec expectAccumulatorMapping(
                InputTypeStrategy validator, TypeStrategy accumulatorStrategy) {
            this.expectedAccumulatorStrategies.put(validator, accumulatorStrategy);
            return this;
        }

        TestSpec expectOutputMapping(InputTypeStrategy validator, TypeStrategy outputStrategy) {
            this.expectedOutputStrategies.put(validator, outputStrategy);
            return this;
        }

        TestSpec expectErrorMessage(String expectedErrorMessage) {
            this.expectedErrorMessage = expectedErrorMessage;
            return this;
        }

        @Override
        public String toString() {
            return description;
        }
    }

    // Matches a ValidationException whose cause chain contains the expected message.
    static Matcher<Throwable> errorMatcher(TestSpec testSpec) {
        return containsCause(new ValidationException(testSpec.expectedErrorMessage));
    }

    // --------------------------------------------------------------------------------------------
    // Test classes for extraction
    // --------------------------------------------------------------------------------------------

    /** Fully specified class-level hint: input types, argument names, and output type. */
    @FunctionHint(
            input = {@DataTypeHint("INT"), @DataTypeHint("STRING")},
            argumentNames = {"i", "s"},
            output = @DataTypeHint("BOOLEAN"))
    private static class FullFunctionHint extends ScalarFunction {
        public Boolean eval(Integer i, String s) {
            return null;
        }
    }

    /** Var-arg function hinted on the eval method itself (ARRAY + ANY input group). */
    private static class ComplexFunctionHint extends ScalarFunction {
        @FunctionHint(
                input = {@DataTypeHint("ARRAY<INT>"), @DataTypeHint(inputGroup = InputGroup.ANY)},
                argumentNames = {"myInt", "myAny"},
                output = @DataTypeHint("BOOLEAN"),
                isVarArgs = true)
        public Boolean eval(Object...
o) {
            return null;
        }
    }

    /** Two complete class-level hints; each defines its own input/output pair. */
    @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT"))
    @FunctionHint(input = @DataTypeHint("BIGINT"), output = @DataTypeHint("BIGINT"))
    private static class FullFunctionHints extends ScalarFunction {
        public Number eval(Number n) {
            return null;
        }
    }

    /** Global output hint on the class combined with per-method input hints. */
    @FunctionHint(output = @DataTypeHint("INT"))
    private static class GlobalOutputFunctionHint extends ScalarFunction {
        @FunctionHint(input = @DataTypeHint("INT"))
        public Integer eval(Integer n) {
            return null;
        }

        @FunctionHint(input = @DataTypeHint("STRING"))
        public Integer eval(String n) {
            return null;
        }
    }

    // Class-level output INT vs. method-level output TINYINT — conflicting hints
    // (class name marks this as an invalid case).
    @FunctionHint(output = @DataTypeHint("INT"))
    private static class InvalidSingleOutputFunctionHint extends ScalarFunction {
        @FunctionHint(output = @DataTypeHint("TINYINT"))
        public Integer eval(Number n) {
            return null;
        }
    }

    /** Full hints split across class level and method level. */
    @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT"))
    private static class SplitFullFunctionHints extends ScalarFunction {
        @FunctionHint(input = @DataTypeHint("BIGINT"), output = @DataTypeHint("BIGINT"))
        public Number eval(Number n) {
            return null;
        }
    }

    // Same input INT hinted twice but with differing outputs (invalid case).
    @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT"))
    private static class InvalidFullOutputFunctionHint extends ScalarFunction {
        @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("BIGINT"))
        public Number eval(Integer i) {
            return null;
        }
    }

    // As above, but the hints additionally disagree on argument names ("a" vs. "b").
    @FunctionHint(input = @DataTypeHint("INT"), argumentNames = "a", output = @DataTypeHint("INT"))
    private static class InvalidFullOutputFunctionWithArgNamesHint extends ScalarFunction {
        @FunctionHint(
                input = @DataTypeHint("INT"),
                argumentNames = "b",
                output = @DataTypeHint("BIGINT"))
        public Number eval(Integer i) {
            return null;
        }
    }

    // Single global input hint paired with two different method-level outputs (invalid case).
    @FunctionHint(input = @DataTypeHint("INT"))
    private static class InvalidLocalOutputFunctionHint extends ScalarFunction {
        @FunctionHint(output = @DataTypeHint("INT"))
        public Integer eval(Integer n) {
            return null;
        }

        @FunctionHint(output = @DataTypeHint("STRING"))
        public Integer eval(String n) {
            return null;
        }
    }

    /** Hint whose second input type is left unspecified (empty @DataTypeHint). */
    @FunctionHint(
            input = {@DataTypeHint("INT"), @DataTypeHint()},
            output = @DataTypeHint("BOOLEAN"))
    private static class IncompleteFunctionHint extends ScalarFunction {
        public Boolean eval(Integer i1, Integer i2) {
            return null;
        }
    }

    /** Two global input hints combined with a single method-level output hint. */
    @FunctionHint(input = @DataTypeHint("INT"))
    @FunctionHint(input = @DataTypeHint("BIGINT"))
    private static class GlobalInputFunctionHints extends ScalarFunction {
        @FunctionHint(output = @DataTypeHint("INT"))
        public Integer eval(Number n) {
            return null;
        }
    }

    // --- Hint-free fixtures: extraction must work from reflection alone. ---

    private static class ZeroArgFunction extends ScalarFunction {
        public Integer eval() {
            return null;
        }
    }

    private static class MixedArgFunction extends ScalarFunction {
        public Integer eval(int i, Double d) {
            return null;
        }
    }

    private static class OverloadedFunction extends ScalarFunction {
        public Integer eval(int i, Double d) {
            return null;
        }

        public long eval(String s) {
            return 0L;
        }
    }

    private static class VarArgFunction extends ScalarFunction {
        public String eval(int i, int... more) {
            return null;
        }
    }

    private static class VarArgWithByteFunction extends ScalarFunction {
        public String eval(byte... bytes) {
            return null;
        }
    }

    /** Output hint compensating for the unspecific Object return type. */
    @FunctionHint(output = @DataTypeHint("INT"))
    private static class ExtractWithOutputHintFunction extends ScalarFunction {
        public Object eval(Integer i) {
            return null;
        }
    }

    /** Input hint compensating for the unspecific Object... parameter. */
    @FunctionHint(
            input = {@DataTypeHint("INT"), @DataTypeHint("BOOLEAN")},
            argumentNames = {"i", "b"})
    private static class ExtractWithInputHintFunction extends ScalarFunction {
        public double eval(Object... o) {
            return 0.0;
        }
    }

    /** Accumulator type varies with the input type via two class-level hints. */
    @FunctionHint(input = @DataTypeHint("BIGINT"), accumulator = @DataTypeHint("ROW<f BIGINT>"))
    @FunctionHint(input = @DataTypeHint("STRING"), accumulator = @DataTypeHint("ROW<f STRING>"))
    private static class InputDependentAccumulatorFunction extends AggregateFunction<String, Row> {

        public void accumulate(Row accumulator, Object o) {
            // nothing to do
        }

        @Override
        public String getValue(Row accumulator) {
            return null;
        }

        @Override
        public Row createAccumulator() {
            return null;
        }
    }

    /** Mixes class-level output hint, method-level accumulator hint, and a parameter hint. */
    @FunctionHint(output = @DataTypeHint("STRING"))
    private static class AggregateFunctionWithManyAnnotations
            extends AggregateFunction<String, Row> {
        @FunctionHint(accumulator = @DataTypeHint("ROW<b BOOLEAN>"))
        public void accumulate(Row accumulator, @DataTypeHint("ROW<i INT, b BOOLEAN>") Row r) {
            // nothing to do
        }

        @Override
        public String getValue(Row accumulator) {
            return null;
        }

        @Override
        public Row createAccumulator() {
            return null;
        }
    }

    @FunctionHint(output = @DataTypeHint("ROW<i INT, b BOOLEAN>"))
    private static class OutputHintTableFunction extends TableFunction<Row> {
        public void eval(int i) {
            // nothing to do
        }
    }

    // Invalid method case: output hint STRING paired with a Long-returning eval(int[])
    // (class name marks this as an invalid case).
    @FunctionHint(output = @DataTypeHint("STRING"))
    private static class InvalidMethodScalarFunction extends ScalarFunction {
        public Long eval(int[] i) {
            return null;
        }
    }

    // Invalid method case for aggregates: accumulator hinted as INT but declared Boolean.
    @FunctionHint(accumulator = @DataTypeHint("INT"))
    private static class InvalidMethodAggregateFunction extends AggregateFunction<String, Boolean> {

        public void accumulate(Boolean acc, int a, boolean b) {
            // nothing to do
        }

        @Override
        public String getValue(Boolean accumulator) {
            return null;
        }

        @Override
        public Boolean createAccumulator() {
            return null;
        }
    }

    // Declares no eval method at all (class name marks this as the missing-method case).
    private static class MissingMethodTableFunction extends TableFunction<String> {
        // nothing to do
    }

    private static class NamedArgumentsScalarFunction extends ScalarFunction {
        public Integer eval(int n) {
            return null;
        }

        public Integer eval(long n) {
            return null;
        }

        public Integer eval(@DataTypeHint("DECIMAL(10, 2)") Object n) {
            return null;
        }
    }
private static class InputGroupScalarFunction extends ScalarFunction { public String eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object o) { return o.toString(); } } private static class VarArgInputGroupScalarFunction extends ScalarFunction { public String eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object... o) { return Arrays.toString(o); } } // extracted order is f(INT) || f(BIGINT) due to method signature sorting private static class OrderedScalarFunction extends ScalarFunction { public Long eval(Long l) { return l; } public Integer eval(Integer i) { return i; } } // extracted order is f(BIGINT) || f(INT) @FunctionHint(input = @DataTypeHint("BIGINT"), output = @DataTypeHint("BIGINT")) @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT")) private static class OrderedScalarFunction2 extends ScalarFunction { public Number eval(Number n) { return n; } } // extracted order is f(BIGINT) || f(INT) private static class OrderedScalarFunction3 extends ScalarFunction { @FunctionHint(input = @DataTypeHint("BIGINT"), output = @DataTypeHint("BIGINT")) @FunctionHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT")) public Number eval(Number n) { return n; } } @DataTypeHint("ROW<i INT>") private static class DataTypeHintOnTableFunctionClass extends TableFunction<Row> { public void eval() { // nothing to do } } private static class DataTypeHintOnTableFunctionMethod extends TableFunction<Row> { @DataTypeHint("ROW<i INT>") public void eval(Integer i) { // nothing to do } } @DataTypeHint("ROW<i BOOLEAN>") private static class InvalidDataTypeHintOnTableFunction extends TableFunction<Row> { @DataTypeHint("ROW<i INT>") public void eval(Integer i) { // nothing to do } } private static class DataTypeHintOnScalarFunction extends ScalarFunction { public @DataTypeHint("ROW<i INT>") RowData eval() { return null; } } }
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.automation.v2015_10_31.implementation; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceFuture; import com.microsoft.azure.CloudException; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.management.automation.v2015_10_31.CredentialCreateOrUpdateParameters; import com.microsoft.azure.management.automation.v2015_10_31.CredentialUpdateParameters; import com.microsoft.azure.management.automation.v2015_10_31.ErrorResponseException; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.HTTP; import retrofit2.http.PATCH; import retrofit2.http.Path; import retrofit2.http.PUT; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in Credentials. */ public class CredentialsInner { /** The Retrofit service to perform REST calls. */ private CredentialsService service; /** The service client containing this operation class. */ private AutomationClientImpl client; /** * Initializes an instance of CredentialsInner. * * @param retrofit the Retrofit instance built from a Retrofit Builder. * @param client the instance of the service client containing this operation class. 
     */
    public CredentialsInner(Retrofit retrofit, AutomationClientImpl client) {
        this.service = retrofit.create(CredentialsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for Credentials to be
     * used by Retrofit to actually perform REST calls.
     */
    interface CredentialsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials delete" })
        // DELETE is declared via @HTTP so that hasBody = true can be specified.
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/credentials/{credentialName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("automationAccountName") String automationAccountName, @Path("credentialName") String credentialName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/credentials/{credentialName}")
        Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("automationAccountName") String automationAccountName, @Path("credentialName") String credentialName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/credentials/{credentialName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("automationAccountName") String automationAccountName, @Path("credentialName") String credentialName, @Path("subscriptionId") String subscriptionId, @Body CredentialCreateOrUpdateParameters parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials update" })
        @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/credentials/{credentialName}")
        Observable<Response<ResponseBody>> update(@Path("resourceGroupName") String resourceGroupName, @Path("automationAccountName") String automationAccountName, @Path("credentialName") String credentialName, @Path("subscriptionId") String subscriptionId, @Body CredentialUpdateParameters parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials listByAutomationAccount" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/credentials")
        Observable<Response<ResponseBody>> listByAutomationAccount(@Path("resourceGroupName") String resourceGroupName, @Path("automationAccountName") String automationAccountName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion,
@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.automation.v2015_10_31.Credentials listByAutomationAccountNext" }) @GET Observable<Response<ResponseBody>> listByAutomationAccountNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); } /** * Delete the credential. * * @param resourceGroupName Name of an Azure Resource group. * @param automationAccountName The name of the automation account. * @param credentialName The name of credential. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws ErrorResponseException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void delete(String resourceGroupName, String automationAccountName, String credentialName) { deleteWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName).toBlocking().single().body(); } /** * Delete the credential. * * @param resourceGroupName Name of an Azure Resource group. * @param automationAccountName The name of the automation account. * @param credentialName The name of credential. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<Void> deleteAsync(String resourceGroupName, String automationAccountName, String credentialName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName), serviceCallback); } /** * Delete the credential. * * @param resourceGroupName Name of an Azure Resource group. 
* @param automationAccountName The name of the automation account. * @param credentialName The name of credential. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */ public Observable<Void> deleteAsync(String resourceGroupName, String automationAccountName, String credentialName) { return deleteWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName).map(new Func1<ServiceResponse<Void>, Void>() { @Override public Void call(ServiceResponse<Void> response) { return response.body(); } }); } /** * Delete the credential. * * @param resourceGroupName Name of an Azure Resource group. * @param automationAccountName The name of the automation account. * @param credentialName The name of credential. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */ public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String automationAccountName, String credentialName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (automationAccountName == null) { throw new IllegalArgumentException("Parameter automationAccountName is required and cannot be null."); } if (credentialName == null) { throw new IllegalArgumentException("Parameter credentialName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } final String apiVersion = "2015-10-31"; return service.delete(resourceGroupName, automationAccountName, credentialName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public 
Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = deleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to Void; all other status codes raise ErrorResponseException.
    private ServiceResponse<Void> deleteDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Retrieve the credential identified by credential name.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CredentialInner object if successful.
     */
    public CredentialInner get(String resourceGroupName, String automationAccountName, String credentialName) {
        return getWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName).toBlocking().single().body();
    }

    /**
     * Retrieve the credential identified by credential name.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CredentialInner> getAsync(String resourceGroupName, String automationAccountName, String credentialName, final ServiceCallback<CredentialInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName), serviceCallback);
    }

    /**
     * Retrieve the credential identified by credential name.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<CredentialInner> getAsync(String resourceGroupName, String automationAccountName, String credentialName) {
        return getWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName).map(new Func1<ServiceResponse<CredentialInner>, CredentialInner>() {
            @Override
            public CredentialInner call(ServiceResponse<CredentialInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Retrieve the credential identified by credential name.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<ServiceResponse<CredentialInner>> getWithServiceResponseAsync(String resourceGroupName, String automationAccountName, String credentialName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (automationAccountName == null) {
            throw new IllegalArgumentException("Parameter automationAccountName is required and cannot be null.");
        }
        if (credentialName == null) {
            throw new IllegalArgumentException("Parameter credentialName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2015-10-31";
        return service.get(resourceGroupName, automationAccountName, credentialName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<CredentialInner>>>() {
                @Override
                public Observable<ServiceResponse<CredentialInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<CredentialInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a deserialized CredentialInner; other codes raise ErrorResponseException.
    private ServiceResponse<CredentialInner> getDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CredentialInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<CredentialInner>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Create a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the create or update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CredentialInner object if successful.
     */
    public CredentialInner createOrUpdate(String resourceGroupName, String automationAccountName, String credentialName, CredentialCreateOrUpdateParameters parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters).toBlocking().single().body();
    }

    /**
     * Create a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the create or update credential operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CredentialInner> createOrUpdateAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialCreateOrUpdateParameters parameters, final ServiceCallback<CredentialInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters), serviceCallback);
    }

    /**
     * Create a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the create or update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<CredentialInner> createOrUpdateAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialCreateOrUpdateParameters parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters).map(new Func1<ServiceResponse<CredentialInner>, CredentialInner>() {
            @Override
            public CredentialInner call(ServiceResponse<CredentialInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Create a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the create or update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<ServiceResponse<CredentialInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialCreateOrUpdateParameters parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (automationAccountName == null) {
            throw new IllegalArgumentException("Parameter automationAccountName is required and cannot be null.");
        }
        if (credentialName == null) {
            throw new IllegalArgumentException("Parameter credentialName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Structural validation of the request body before sending.
        Validator.validate(parameters);
        final String apiVersion = "2015-10-31";
        return service.createOrUpdate(resourceGroupName, automationAccountName, credentialName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<CredentialInner>>>() {
                @Override
                public Observable<ServiceResponse<CredentialInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<CredentialInner> clientResponse = createOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 (updated) and 201 (created) to CredentialInner.
    private ServiceResponse<CredentialInner> createOrUpdateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CredentialInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<CredentialInner>() { }.getType())
                .register(201, new TypeToken<CredentialInner>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Update a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the Update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CredentialInner object if successful.
     */
    public CredentialInner update(String resourceGroupName, String automationAccountName, String credentialName, CredentialUpdateParameters parameters) {
        return updateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters).toBlocking().single().body();
    }

    /**
     * Update a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the Update credential operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CredentialInner> updateAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialUpdateParameters parameters, final ServiceCallback<CredentialInner> serviceCallback) {
        return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters), serviceCallback);
    }

    /**
     * Update a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the Update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<CredentialInner> updateAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialUpdateParameters parameters) {
        return updateWithServiceResponseAsync(resourceGroupName, automationAccountName, credentialName, parameters).map(new Func1<ServiceResponse<CredentialInner>, CredentialInner>() {
            @Override
            public CredentialInner call(ServiceResponse<CredentialInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Update a credential.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param credentialName The name of credential.
     * @param parameters The parameters supplied to the Update credential operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CredentialInner object
     */
    public Observable<ServiceResponse<CredentialInner>> updateWithServiceResponseAsync(String resourceGroupName, String automationAccountName, String credentialName, CredentialUpdateParameters parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (automationAccountName == null) {
            throw new IllegalArgumentException("Parameter automationAccountName is required and cannot be null.");
        }
        if (credentialName == null) {
            throw new IllegalArgumentException("Parameter credentialName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Structural validation of the request body before sending.
        Validator.validate(parameters);
        final String apiVersion = "2015-10-31";
        return service.update(resourceGroupName, automationAccountName, credentialName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<CredentialInner>>>() {
                @Override
                public Observable<ServiceResponse<CredentialInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<CredentialInner> clientResponse = updateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to a deserialized CredentialInner; other codes raise ErrorResponseException.
    private ServiceResponse<CredentialInner> updateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CredentialInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new
TypeToken<CredentialInner>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;CredentialInner&gt; object if successful.
     */
    public PagedList<CredentialInner> listByAutomationAccount(final String resourceGroupName, final String automationAccountName) {
        // Blocking variant: fetches the first page, then pages lazily via nextPage().
        ServiceResponse<Page<CredentialInner>> response = listByAutomationAccountSinglePageAsync(resourceGroupName, automationAccountName).toBlocking().single();
        return new PagedList<CredentialInner>(response.body()) {
            @Override
            public Page<CredentialInner> nextPage(String nextPageLink) {
                return listByAutomationAccountNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<CredentialInner>> listByAutomationAccountAsync(final String resourceGroupName, final String automationAccountName, final ListOperationCallback<CredentialInner> serviceCallback) {
        // The second argument tells the future how to fetch each subsequent page.
        return AzureServiceFuture.fromPageResponse(
            listByAutomationAccountSinglePageAsync(resourceGroupName, automationAccountName),
            new Func1<String, Observable<ServiceResponse<Page<CredentialInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CredentialInner>>> call(String nextPageLink) {
                    return listByAutomationAccountNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CredentialInner&gt; object
     */
    public Observable<Page<CredentialInner>> listByAutomationAccountAsync(final String resourceGroupName, final String automationAccountName) {
        return listByAutomationAccountWithServiceResponseAsync(resourceGroupName, automationAccountName)
            .map(new Func1<ServiceResponse<Page<CredentialInner>>, Page<CredentialInner>>() {
                @Override
                public Page<CredentialInner> call(ServiceResponse<Page<CredentialInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CredentialInner&gt; object
     */
    public Observable<ServiceResponse<Page<CredentialInner>>> listByAutomationAccountWithServiceResponseAsync(final String resourceGroupName, final String automationAccountName) {
        // Recursively concatenates every page until nextPageLink is null.
        return listByAutomationAccountSinglePageAsync(resourceGroupName, automationAccountName)
            .concatMap(new Func1<ServiceResponse<Page<CredentialInner>>, Observable<ServiceResponse<Page<CredentialInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CredentialInner>>> call(ServiceResponse<Page<CredentialInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByAutomationAccountNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;CredentialInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<CredentialInner>>> listByAutomationAccountSinglePageAsync(final String resourceGroupName, final String automationAccountName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (automationAccountName == null) {
            throw new IllegalArgumentException("Parameter automationAccountName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2015-10-31";
        return service.listByAutomationAccount(resourceGroupName, automationAccountName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<CredentialInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CredentialInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Re-wrap the PageImpl-typed result as the public Page interface.
                        ServiceResponse<PageImpl<CredentialInner>> result = listByAutomationAccountDelegate(response);
                        return Observable.just(new ServiceResponse<Page<CredentialInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw list response; only HTTP 200 is accepted, anything else
    // is mapped to CloudException.
    private ServiceResponse<PageImpl<CredentialInner>> listByAutomationAccountDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<CredentialInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<CredentialInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Retrieve a list of credentials.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;CredentialInner&gt; object if successful. */ public PagedList<CredentialInner> listByAutomationAccountNext(final String nextPageLink) { ServiceResponse<Page<CredentialInner>> response = listByAutomationAccountNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<CredentialInner>(response.body()) { @Override public Page<CredentialInner> nextPage(String nextPageLink) { return listByAutomationAccountNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Retrieve a list of credentials. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<CredentialInner>> listByAutomationAccountNextAsync(final String nextPageLink, final ServiceFuture<List<CredentialInner>> serviceFuture, final ListOperationCallback<CredentialInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listByAutomationAccountNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<CredentialInner>>>>() { @Override public Observable<ServiceResponse<Page<CredentialInner>>> call(String nextPageLink) { return listByAutomationAccountNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Retrieve a list of credentials. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;CredentialInner&gt; object */ public Observable<Page<CredentialInner>> listByAutomationAccountNextAsync(final String nextPageLink) { return listByAutomationAccountNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<CredentialInner>>, Page<CredentialInner>>() { @Override public Page<CredentialInner> call(ServiceResponse<Page<CredentialInner>> response) { return response.body(); } }); } /** * Retrieve a list of credentials. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;CredentialInner&gt; object */ public Observable<ServiceResponse<Page<CredentialInner>>> listByAutomationAccountNextWithServiceResponseAsync(final String nextPageLink) { return listByAutomationAccountNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<CredentialInner>>, Observable<ServiceResponse<Page<CredentialInner>>>>() { @Override public Observable<ServiceResponse<Page<CredentialInner>>> call(ServiceResponse<Page<CredentialInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listByAutomationAccountNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Retrieve a list of credentials. * ServiceResponse<PageImpl<CredentialInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;CredentialInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<CredentialInner>>> listByAutomationAccountNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listByAutomationAccountNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<CredentialInner>>>>() { @Override public Observable<ServiceResponse<Page<CredentialInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<CredentialInner>> result = listByAutomationAccountNextDelegate(response); return Observable.just(new ServiceResponse<Page<CredentialInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<CredentialInner>> listByAutomationAccountNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<CredentialInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<CredentialInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } }
/*
 * Copyright 2009-2010 WSO2, Inc. (http://wso2.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;

import javax.xml.namespace.QName;

import org.apache.synapse.endpoints.Endpoint;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.util.xpath.SynapseXPath;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.core.runtime.Assert;
import org.eclipse.emf.ecore.EObject;
import org.jaxen.JaxenException;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbNode;
import org.wso2.developerstudio.eclipse.gmf.esb.FaultCodeType;
import org.wso2.developerstudio.eclipse.gmf.esb.FaultMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.EsbNodeTransformer;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.TransformationInfo;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.TransformerException;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.ValidationConstansts;

/**
 * {@link EsbNodeTransformer} responsible for transforming
 * {@link org.wso2.developerstudio.eclipse.gmf.esb.FaultMediator}
 * model objects into corresponding synapse artifact(s).
*/ public class FaultMediatorTransformer extends AbstractEsbNodeTransformer { public static final String soap11EnvNS = "http://schemas.xmlsoap.org/soap/envelope/"; public static final String soap12EnvNS = "http://www.w3.org/2003/05/soap-envelope"; /** * {@inheritDoc} */ public void transform(TransformationInfo info, EsbNode subject) throws TransformerException { try { info.getParentSequence().addChild(createFaultMediator(subject, false)); // Transform the fault mediator output data flow path. doTransform(info, ((FaultMediator) subject).getOutputConnector()); } catch (JaxenException e) { throw new TransformerException(e); } catch (URISyntaxException e) { throw new TransformerException(e); } } public void createSynapseObject(TransformationInfo info, EObject subject, List<Endpoint> endPoints) { // TODO Auto-generated method stub } public void transformWithinSequence(TransformationInfo information, EsbNode subject, SequenceMediator sequence) throws TransformerException { try { sequence.addChild(createFaultMediator(subject, false)); doTransformWithinSequence(information, ((FaultMediator) subject).getOutputConnector().getOutgoingLink(), sequence); } catch (JaxenException e) { throw new TransformerException(e); } catch (URISyntaxException e) { throw new TransformerException(e); } } public static org.apache.synapse.mediators.transform.FaultMediator createFaultMediator(EsbNode subject, boolean isForValidation) throws JaxenException, URISyntaxException { // Check subject. Assert.isTrue(subject instanceof FaultMediator, "Invalid subject."); FaultMediator visualFault = (FaultMediator) subject; // Configure fault mediator. org.apache.synapse.mediators.transform.FaultMediator faultMediator = new org.apache.synapse.mediators.transform.FaultMediator(); setCommonProperties(faultMediator, visualFault); { // Soap Version. 
switch (visualFault.getSoapVersion()) { case SOAP_11: faultMediator.setSoapVersion(org.apache.synapse.mediators.transform.FaultMediator.SOAP11); if (visualFault.getFaultActor() != null) { faultMediator.setFaultRole(new URI(visualFault.getFaultActor())); } if (visualFault.getFaultCodeType().equals(FaultCodeType.EXPRESSION)) { SynapseXPath codeTypeExpression; if (!isForValidation && StringUtils.isEmpty(visualFault.getFaultCodeExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we can use synapse serializer codeTypeExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { codeTypeExpression = new SynapseXPath(visualFault.getFaultCodeExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultCodeExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultCodeExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultCodeExpression().getNamespaces().get(prefix); codeTypeExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultCodeExpr(codeTypeExpression); } else { switch (visualFault.getFaultCodeSoap11()) { case VERSION_MISSMATCH: faultMediator.setFaultCodeValue(new QName(soap11EnvNS, "VersionMismatch", "soap11Env")); break; case MUST_UNDERSTAND: faultMediator.setFaultCodeValue(new QName(soap11EnvNS, "MustUnderstand", "soap11Env")); break; case SERVER: faultMediator.setFaultCodeValue(new QName(soap11EnvNS, "Server", "soap11Env")); break; case CLIENT: faultMediator.setFaultCodeValue(new QName(soap11EnvNS, "Client", "soap11Env")); break; } } switch (visualFault.getFaultStringType()) { case VALUE: faultMediator.setFaultReasonValue(visualFault.getFaultStringValue()); break; case EXPRESSION: SynapseXPath reasonExpression = new SynapseXPath( visualFault.getFaultStringExpression().getPropertyValue()); for (int i = 0; i < visualFault.getFaultStringExpression().getNamespaces().keySet().size(); ++i) { String 
prefix = (String) visualFault.getFaultStringExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultStringExpression().getNamespaces().get(prefix); reasonExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultReasonExpr(reasonExpression); break; } switch (visualFault.getFaultDetailType()) { case VALUE: faultMediator.setFaultDetail(visualFault.getFaultDetailValue()); break; case EXPRESSION: SynapseXPath detailExpression = new SynapseXPath( visualFault.getFaultDetailExpression().getPropertyValue()); for (int i = 0; i < visualFault.getFaultDetailExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultDetailExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultDetailExpression().getNamespaces().get(prefix); detailExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultDetailExpr(detailExpression); break; } break; case SOAP_12: faultMediator.setSoapVersion(org.apache.synapse.mediators.transform.FaultMediator.SOAP12); if (visualFault.getRoleName() != null) { faultMediator.setFaultRole(new URI(visualFault.getRoleName())); } if (visualFault.getFaultCodeType().equals(FaultCodeType.EXPRESSION)) { SynapseXPath codeTypeExpression; if(!isForValidation && StringUtils.isEmpty(visualFault.getFaultCodeExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we can use synapse serializer codeTypeExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { codeTypeExpression = new SynapseXPath(visualFault.getFaultCodeExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultCodeExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultCodeExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultCodeExpression().getNamespaces().get(prefix); codeTypeExpression.addNamespace(prefix, 
namespaceUri); } faultMediator.setFaultCodeExpr(codeTypeExpression); } else { switch (visualFault.getFaultCodeSoap12()) { case VERSION_MISSMATCH: faultMediator.setFaultCodeValue(new QName(soap12EnvNS, "VersionMismatch", "soap12Env")); break; case MUST_UNDERSTAND: faultMediator.setFaultCodeValue(new QName(soap12EnvNS, "MustUnderstand", "soap12Env")); break; case SENDER: faultMediator.setFaultCodeValue(new QName(soap12EnvNS, "Sender", "soap12Env")); break; case RECEIVER: faultMediator.setFaultCodeValue(new QName(soap12EnvNS, "Receiver", "soap12Env")); break; case DATA_ENCODING_UNKNOWN: faultMediator.setFaultCodeValue(new QName(soap12EnvNS, "DataEncodingUnknown", "soap12Env")); break; } } switch (visualFault.getFaultReasonType()) { case VALUE: faultMediator.setFaultReasonValue(visualFault.getFaultReasonValue()); break; case EXPRESSION: SynapseXPath reasonExpression; if(!isForValidation && StringUtils.isEmpty(visualFault.getFaultReasonExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we can use synapse serializer reasonExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { reasonExpression = new SynapseXPath(visualFault.getFaultReasonExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultReasonExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultReasonExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultReasonExpression().getNamespaces().get(prefix); reasonExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultReasonExpr(reasonExpression); break; } switch (visualFault.getFaultDetailType()) { case VALUE: faultMediator.setFaultDetail(visualFault.getFaultDetailValue()); break; case EXPRESSION: SynapseXPath detailExpression; if(!isForValidation && StringUtils.isEmpty(visualFault.getFaultDetailExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we 
can use synapse serializer detailExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { detailExpression = new SynapseXPath(visualFault.getFaultDetailExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultDetailExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultDetailExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultDetailExpression().getNamespaces().get(prefix); detailExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultDetailExpr(detailExpression); break; } if (visualFault.getNodeName() != null) { faultMediator.setFaultNode(new URI(visualFault.getNodeName())); } break; case POX: faultMediator.setSoapVersion(org.apache.synapse.mediators.transform.FaultMediator.POX); switch (visualFault.getFaultReasonType()) { case VALUE: faultMediator.setFaultReasonValue(visualFault.getFaultReasonValue()); break; case EXPRESSION: SynapseXPath reasonExpression; if(!isForValidation && StringUtils.isEmpty(visualFault.getFaultReasonExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we can use synapse serializer reasonExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { reasonExpression = new SynapseXPath(visualFault.getFaultReasonExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultReasonExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultReasonExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultReasonExpression().getNamespaces().get(prefix); reasonExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultReasonExpr(reasonExpression); break; } switch (visualFault.getFaultDetailType()) { case VALUE: faultMediator.setFaultDetail(visualFault.getFaultDetailValue()); break; case EXPRESSION: SynapseXPath detailExpression; 
if(!isForValidation && StringUtils.isEmpty(visualFault.getFaultDetailExpression().getPropertyValue())) { // Fill the XPath with a default values, so that we can use synapse serializer detailExpression = new SynapseXPath(ValidationConstansts.DEFAULT_XPATH_FOR_VALIDATION); } else { detailExpression = new SynapseXPath(visualFault.getFaultDetailExpression().getPropertyValue()); } for (int i = 0; i < visualFault.getFaultDetailExpression().getNamespaces().keySet().size(); ++i) { String prefix = (String) visualFault.getFaultDetailExpression().getNamespaces().keySet() .toArray()[i]; String namespaceUri = visualFault.getFaultDetailExpression().getNamespaces().get(prefix); detailExpression.addNamespace(prefix, namespaceUri); } faultMediator.setFaultDetailExpr(detailExpression); break; } } faultMediator.setSerializeResponse(visualFault.isSerializeResponse()); if (visualFault.isSerializeResponse()) { // Response?. faultMediator.setMarkAsResponse(visualFault.isMarkAsResponse()); } } return faultMediator; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.state; import org.apache.flink.api.common.JobID; import org.apache.flink.runtime.checkpoint.CheckpointMetaData; import org.apache.flink.runtime.checkpoint.CheckpointMetrics; import org.apache.flink.runtime.checkpoint.JobManagerTaskRestore; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.runtime.checkpoint.PrioritizedOperatorSubtaskState; import org.apache.flink.runtime.checkpoint.StateHandleDummyUtil; import org.apache.flink.runtime.checkpoint.StateObjectCollection; import org.apache.flink.runtime.checkpoint.TaskStateSnapshot; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.state.changelog.StateChangelogStorage; import org.apache.flink.runtime.state.changelog.inmemory.InMemoryStateChangelogStorage; import org.apache.flink.runtime.taskmanager.CheckpointResponder; import org.apache.flink.runtime.taskmanager.TestCheckpointResponder; import org.apache.flink.util.TestLogger; import 
org.apache.flink.util.concurrent.Executors; import org.junit.Assert; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.util.Iterator; import java.util.concurrent.Executor; public class TaskStateManagerImplTest extends TestLogger { /** Test reporting and retrieving prioritized local and remote state. */ @Test public void testStateReportingAndRetrieving() { JobID jobID = new JobID(); ExecutionAttemptID executionAttemptID = new ExecutionAttemptID(); TestCheckpointResponder testCheckpointResponder = new TestCheckpointResponder(); TestTaskLocalStateStore testTaskLocalStateStore = new TestTaskLocalStateStore(); InMemoryStateChangelogStorage changelogStorage = new InMemoryStateChangelogStorage(); TaskStateManager taskStateManager = taskStateManager( jobID, executionAttemptID, testCheckpointResponder, null, testTaskLocalStateStore, changelogStorage); // ---------------------------------------- test reporting // ----------------------------------------- CheckpointMetaData checkpointMetaData = new CheckpointMetaData(74L, 11L); CheckpointMetrics checkpointMetrics = new CheckpointMetrics(); TaskStateSnapshot jmTaskStateSnapshot = new TaskStateSnapshot(); OperatorID operatorID_1 = new OperatorID(1L, 1L); OperatorID operatorID_2 = new OperatorID(2L, 2L); OperatorID operatorID_3 = new OperatorID(3L, 3L); Assert.assertFalse(taskStateManager.prioritizedOperatorState(operatorID_1).isRestored()); Assert.assertFalse(taskStateManager.prioritizedOperatorState(operatorID_2).isRestored()); Assert.assertFalse(taskStateManager.prioritizedOperatorState(operatorID_3).isRestored()); KeyGroupRange keyGroupRange = new KeyGroupRange(0, 1); // Remote state of operator 1 has only managed keyed state. 
OperatorSubtaskState jmOperatorSubtaskState_1 = OperatorSubtaskState.builder() .setManagedKeyedState( StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange)) .build(); // Remote state of operator 1 has only raw keyed state. OperatorSubtaskState jmOperatorSubtaskState_2 = OperatorSubtaskState.builder() .setRawKeyedState( StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange)) .build(); jmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_1, jmOperatorSubtaskState_1); jmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_2, jmOperatorSubtaskState_2); TaskStateSnapshot tmTaskStateSnapshot = new TaskStateSnapshot(); // Only operator 1 has a local alternative for the managed keyed state. OperatorSubtaskState tmOperatorSubtaskState_1 = OperatorSubtaskState.builder() .setManagedKeyedState( StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange)) .build(); tmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_1, tmOperatorSubtaskState_1); taskStateManager.reportTaskStateSnapshots( checkpointMetaData, checkpointMetrics, jmTaskStateSnapshot, tmTaskStateSnapshot); TestCheckpointResponder.AcknowledgeReport acknowledgeReport = testCheckpointResponder.getAcknowledgeReports().get(0); // checks that the checkpoint responder and the local state store received state as // expected. 
Assert.assertEquals( checkpointMetaData.getCheckpointId(), acknowledgeReport.getCheckpointId()); Assert.assertEquals(checkpointMetrics, acknowledgeReport.getCheckpointMetrics()); Assert.assertEquals(executionAttemptID, acknowledgeReport.getExecutionAttemptID()); Assert.assertEquals(jobID, acknowledgeReport.getJobID()); Assert.assertEquals(jmTaskStateSnapshot, acknowledgeReport.getSubtaskState()); Assert.assertEquals( tmTaskStateSnapshot, testTaskLocalStateStore.retrieveLocalState(checkpointMetaData.getCheckpointId())); // -------------------------------------- test prio retrieving // --------------------------------------- JobManagerTaskRestore taskRestore = new JobManagerTaskRestore( checkpointMetaData.getCheckpointId(), acknowledgeReport.getSubtaskState()); taskStateManager = taskStateManager( jobID, executionAttemptID, testCheckpointResponder, taskRestore, testTaskLocalStateStore, changelogStorage); // this has remote AND local managed keyed state. PrioritizedOperatorSubtaskState prioritized_1 = taskStateManager.prioritizedOperatorState(operatorID_1); // this has only remote raw keyed state. PrioritizedOperatorSubtaskState prioritized_2 = taskStateManager.prioritizedOperatorState(operatorID_2); // not restored. PrioritizedOperatorSubtaskState prioritized_3 = taskStateManager.prioritizedOperatorState(operatorID_3); Assert.assertTrue(prioritized_1.isRestored()); Assert.assertTrue(prioritized_2.isRestored()); Assert.assertTrue(prioritized_3.isRestored()); Assert.assertTrue(taskStateManager.prioritizedOperatorState(new OperatorID()).isRestored()); // checks for operator 1. 
Iterator<StateObjectCollection<KeyedStateHandle>> prioritizedManagedKeyedState_1 = prioritized_1.getPrioritizedManagedKeyedState().iterator(); Assert.assertTrue(prioritizedManagedKeyedState_1.hasNext()); StateObjectCollection<KeyedStateHandle> current = prioritizedManagedKeyedState_1.next(); KeyedStateHandle keyedStateHandleExp = tmOperatorSubtaskState_1.getManagedKeyedState().iterator().next(); KeyedStateHandle keyedStateHandleAct = current.iterator().next(); Assert.assertTrue(keyedStateHandleExp == keyedStateHandleAct); Assert.assertTrue(prioritizedManagedKeyedState_1.hasNext()); current = prioritizedManagedKeyedState_1.next(); keyedStateHandleExp = jmOperatorSubtaskState_1.getManagedKeyedState().iterator().next(); keyedStateHandleAct = current.iterator().next(); Assert.assertTrue(keyedStateHandleExp == keyedStateHandleAct); Assert.assertFalse(prioritizedManagedKeyedState_1.hasNext()); // checks for operator 2. Iterator<StateObjectCollection<KeyedStateHandle>> prioritizedRawKeyedState_2 = prioritized_2.getPrioritizedRawKeyedState().iterator(); Assert.assertTrue(prioritizedRawKeyedState_2.hasNext()); current = prioritizedRawKeyedState_2.next(); keyedStateHandleExp = jmOperatorSubtaskState_2.getRawKeyedState().iterator().next(); keyedStateHandleAct = current.iterator().next(); Assert.assertTrue(keyedStateHandleExp == keyedStateHandleAct); Assert.assertFalse(prioritizedRawKeyedState_2.hasNext()); } /** * This tests if the {@link TaskStateManager} properly returns the subtask local state dir from * the corresponding {@link TaskLocalStateStoreImpl}. 
*/
    @Test
    public void testForwardingSubtaskLocalStateBaseDirFromLocalStateStore() throws IOException {
        JobID jobID = new JobID(42L, 43L);
        AllocationID allocationID = new AllocationID(4711L, 23L);
        JobVertexID jobVertexID = new JobVertexID(12L, 34L);
        ExecutionAttemptID executionAttemptID = new ExecutionAttemptID();
        TestCheckpointResponder checkpointResponderMock = new TestCheckpointResponder();
        Executor directExecutor = Executors.directExecutor();
        TemporaryFolder tmpFolder = new TemporaryFolder();
        try {
            tmpFolder.create();
            // Three allocation base directories; the provider is expected to cycle through
            // them round-robin, which the loop below checks for both access paths.
            File[] allocBaseDirs =
                    new File[] {
                        tmpFolder.newFolder(), tmpFolder.newFolder(), tmpFolder.newFolder()
                    };
            LocalRecoveryDirectoryProviderImpl directoryProvider =
                    new LocalRecoveryDirectoryProviderImpl(allocBaseDirs, jobID, jobVertexID, 0);
            LocalRecoveryConfig localRecoveryConfig =
                    new LocalRecoveryConfig(true, directoryProvider);
            TaskLocalStateStore taskLocalStateStore =
                    new TaskLocalStateStoreImpl(
                            jobID,
                            allocationID,
                            jobVertexID,
                            13,
                            localRecoveryConfig,
                            directExecutor);
            InMemoryStateChangelogStorage changelogStorage = new InMemoryStateChangelogStorage();
            TaskStateManager taskStateManager =
                    taskStateManager(
                            jobID,
                            executionAttemptID,
                            checkpointResponderMock,
                            null,
                            taskLocalStateStore,
                            changelogStorage);
            // The config reported by the TaskStateManager must match the one held by the
            // underlying local state store.
            LocalRecoveryConfig localRecoveryConfFromTaskLocalStateStore =
                    taskLocalStateStore.getLocalRecoveryConfig();
            LocalRecoveryConfig localRecoveryConfFromTaskStateManager =
                    taskStateManager.createLocalRecoveryConfig();
            for (int i = 0; i < 10; ++i) {
                Assert.assertEquals(
                        allocBaseDirs[i % allocBaseDirs.length],
                        localRecoveryConfFromTaskLocalStateStore
                                .getLocalStateDirectoryProvider()
                                .allocationBaseDirectory(i));
                Assert.assertEquals(
                        allocBaseDirs[i % allocBaseDirs.length],
                        localRecoveryConfFromTaskStateManager
                                .getLocalStateDirectoryProvider()
                                .allocationBaseDirectory(i));
            }
            Assert.assertEquals(
                    localRecoveryConfFromTaskLocalStateStore.isLocalRecoveryEnabled(),
                    localRecoveryConfFromTaskStateManager.isLocalRecoveryEnabled());
        } finally {
            tmpFolder.delete();
        }
    }

    /** A task restored from {@link TaskStateSnapshot#FINISHED_ON_RESTORE} reports as finished. */
    @Test
    public void testStateRetrievingWithFinishedOperator() {
        TaskStateSnapshot taskStateSnapshot = TaskStateSnapshot.FINISHED_ON_RESTORE;
        JobManagerTaskRestore jobManagerTaskRestore =
                new JobManagerTaskRestore(2, taskStateSnapshot);
        TaskStateManagerImpl stateManager =
                new TaskStateManagerImpl(
                        new JobID(),
                        new ExecutionAttemptID(),
                        new TestTaskLocalStateStore(),
                        null,
                        jobManagerTaskRestore,
                        new TestCheckpointResponder());
        Assert.assertTrue(stateManager.isTaskDeployedAsFinished());
    }

    /**
     * Checks that {@code getRestoreCheckpointId()} is empty without a restore and carries the
     * restore's checkpoint id otherwise.
     */
    // FIX: this method was missing the @Test annotation, so JUnit silently never executed it.
    @Test
    public void testAcquringRestoreCheckpointId() {
        TaskStateManagerImpl emptyStateManager =
                new TaskStateManagerImpl(
                        new JobID(),
                        new ExecutionAttemptID(),
                        new TestTaskLocalStateStore(),
                        null,
                        null,
                        new TestCheckpointResponder());
        Assert.assertFalse(emptyStateManager.getRestoreCheckpointId().isPresent());

        TaskStateManagerImpl nonEmptyStateManager =
                new TaskStateManagerImpl(
                        new JobID(),
                        new ExecutionAttemptID(),
                        new TestTaskLocalStateStore(),
                        null,
                        new JobManagerTaskRestore(2, new TaskStateSnapshot()),
                        new TestCheckpointResponder());
        Assert.assertEquals(2L, (long) nonEmptyStateManager.getRestoreCheckpointId().get());
    }

    /** Convenience factory wiring a {@link TaskStateManagerImpl} for the tests in this class. */
    public static TaskStateManager taskStateManager(
            JobID jobID,
            ExecutionAttemptID executionAttemptID,
            CheckpointResponder checkpointResponderMock,
            JobManagerTaskRestore jobManagerTaskRestore,
            TaskLocalStateStore localStateStore,
            StateChangelogStorage<?> stateChangelogStorage) {
        return new TaskStateManagerImpl(
                jobID,
                executionAttemptID,
                localStateStore,
                stateChangelogStorage,
                jobManagerTaskRestore,
                checkpointResponderMock);
    }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.query;

import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;

import java.io.IOException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Timestamp;
import java.util.Date;
import java.util.Objects;
import java.util.UUID;

/**
 * Container of sample value types used by query tests: a {@link Portable} employee,
 * a few {@link Serializable} domain objects, and one trivial wrapper per attribute
 * type ({@code ObjectWithXxx}).
 */
public final class SampleObjects {

    /** Portable sample with two fields; factory id 666, class id 2. */
    public static class PortableEmployee implements Portable {

        private int age;
        private String name;

        public PortableEmployee() {
        }

        public PortableEmployee(int age, String name) {
            this.age = age;
            this.name = name;
        }

        @Override
        public int getFactoryId() {
            return 666;
        }

        @Override
        public int getClassId() {
            return 2;
        }

        @Override
        public void writePortable(PortableWriter writer) throws IOException {
            writer.writeUTF("n", name);
            writer.writeInt("a", age);
        }

        @Override
        public void readPortable(PortableReader reader) throws IOException {
            name = reader.readUTF("n");
            age = reader.readInt("a");
        }

        @Override
        public String toString() {
            return "PortableEmployee{" + "age=" + age + ", name='" + name + '\'' + '}';
        }
    }

    /**
     * Comparable wrapper around a type name; {@code null} names sort first and any
     * non-null instance compares greater than {@code null}.
     */
    public static class ValueType implements Serializable, Comparable<ValueType> {

        String typeName;

        public ValueType(String typeName) {
            this.typeName = typeName;
        }

        public ValueType() {
        }

        public String getTypeName() {
            return typeName;
        }

        @Override
        public int compareTo(ValueType o) {
            // Null-tolerant ordering: null object < any object, null name < any name.
            if (o == null) {
                return 1;
            }
            if (typeName == null) {
                if (o.typeName == null) {
                    return 0;
                } else {
                    return -1;
                }
            }
            if (o.typeName == null) {
                return 1;
            }
            return typeName.compareTo(o.typeName);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            ValueType valueType = (ValueType) o;
            // Idiomatic null-safe comparison; equivalent to the previous
            // hand-rolled ternary expression.
            return Objects.equals(typeName, valueType.typeName);
        }

        @Override
        public int hashCode() {
            // Objects.hashCode(x) == (x != null ? x.hashCode() : 0)
            return Objects.hashCode(typeName);
        }
    }

    /**
     * Sample value with name/type/state/index. Note: equals/hashCode intentionally
     * ignore {@code state} (only name, type and index participate).
     */
    public static class Value implements Serializable {

        String name;
        ValueType type;
        State state;
        int index;

        public Value(String name, ValueType type, int index) {
            this.name = name;
            this.type = type;
            this.index = index;
        }

        public Value(State state, ValueType type, int index) {
            this.state = state;
            this.type = type;
            this.index = index;
        }

        public Value(String name, int index) {
            this.name = name;
            this.index = index;
        }

        public Value(String name) {
            this(name, null, 0);
        }

        public State getState() {
            return state;
        }

        public void setState(State state) {
            this.state = state;
        }

        public String getName() {
            return name;
        }

        public ValueType getType() {
            return type;
        }

        public int getIndex() {
            return index;
        }

        public void setIndex(final int index) {
            this.index = index;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Value value = (Value) o;
            return index == value.index
                    && Objects.equals(name, value.name)
                    && Objects.equals(type, value.type);
        }

        @Override
        public int hashCode() {
            // Same 31-based chain as before; Objects.hashCode handles null fields.
            int result = Objects.hashCode(name);
            result = 31 * result + Objects.hashCode(type);
            result = 31 * result + index;
            return result;
        }

        @Override
        public String toString() {
            return "Value{" + "name=" + name + ", index=" + index + ", type=" + type + '}';
        }
    }

    public enum State {
        STATE1, STATE2
    }

    /**
     * Sample employee. equals/hashCode only use name, age, active and salary;
     * id, city, dates and state are deliberately excluded.
     */
    @SuppressWarnings("unused")
    public static class Employee implements Serializable {

        long id;
        String name;
        String city;
        int age;
        boolean active;
        double salary;
        Timestamp date;
        Date createDate;
        State state;

        public Employee(long id, String name, int age, boolean live, double salary, State state) {
            this(id, name, age, live, salary);
            this.state = state;
        }

        public Employee(long id, String name, int age, boolean live, double salary) {
            this(id, name, null, age, live, salary);
        }

        public Employee(String name, int age, boolean live, double salary) {
            this(-1, name, age, live, salary);
        }

        public Employee(String name, String city, int age, boolean live, double salary) {
            this(-1, name, city, age, live, salary);
        }

        public Employee(long id, String name, String city, int age, boolean live, double salary) {
            this.id = id;
            this.name = name;
            this.city = city;
            this.age = age;
            this.active = live;
            this.salary = salary;
            // createDate/date are both stamped with "now" at construction time.
            this.createDate = new Date();
            this.date = new Timestamp(createDate.getTime());
        }

        public Employee() {
        }

        public long getId() {
            return id;
        }

        public void setId(long id) {
            this.id = id;
        }

        public Date getCreateDate() {
            return createDate;
        }

        public void setCreateDate(Date createDate) {
            this.createDate = createDate;
        }

        public void setName(String name) {
            this.name = name;
        }

        public void setCity(String city) {
            this.city = city;
        }

        public void setAge(int age) {
            this.age = age;
        }

        public void setActive(boolean active) {
            this.active = active;
        }

        public void setSalary(double salary) {
            this.salary = salary;
        }

        public void setDate(Timestamp date) {
            this.date = date;
        }

        public Timestamp getDate() {
            return date;
        }

        public String getName() {
            return name;
        }

        public String getCity() {
            return city;
        }

        public int getAge() {
            return age;
        }

        public double getSalary() {
            return salary;
        }

        public boolean isActive() {
            return active;
        }

        public State getState() {
            return state;
        }

        public void setState(State state) {
            this.state = state;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Employee employee = (Employee) o;
            if (active != employee.active) {
                return false;
            }
            if (age != employee.age) {
                return false;
            }
            // Double.compare distinguishes -0.0 from +0.0 and handles NaN.
            if (Double.compare(employee.salary, salary) != 0) {
                return false;
            }
            return Objects.equals(name, employee.name);
        }

        @Override
        public int hashCode() {
            int result;
            long temp;
            result = Objects.hashCode(name);
            result = 31 * result + age;
            result = 31 * result + (active ? 1 : 0);
            // Preserve original formula: zero (and -0.0) salary hashes as 0L.
            temp = salary != +0.0d ? Double.doubleToLongBits(salary) : 0L;
            result = 31 * result + (int) (temp ^ (temp >>> 32));
            return result;
        }

        @Override
        public String toString() {
            return "Employee{"
                    + "name='" + name + '\''
                    + ", city=" + city
                    + ", age=" + age
                    + ", active=" + active
                    + ", salary=" + salary
                    + '}';
        }
    }

    /** Wrapper with a single {@code int} attribute. */
    public static class ObjectWithInteger implements Serializable {

        private int attribute;

        public ObjectWithInteger(int attribute) {
            this.attribute = attribute;
        }

        public int getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code long} attribute. */
    public static class ObjectWithLong implements Serializable {

        private long attribute;

        public ObjectWithLong(long attribute) {
            this.attribute = attribute;
        }

        public long getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code short} attribute. */
    public static class ObjectWithShort implements Serializable {

        private short attribute;

        public ObjectWithShort(short attribute) {
            this.attribute = attribute;
        }

        public short getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code byte} attribute. */
    public static class ObjectWithByte implements Serializable {

        private byte attribute;

        public ObjectWithByte(byte attribute) {
            this.attribute = attribute;
        }

        public byte getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code float} attribute. */
    public static class ObjectWithFloat implements Serializable {

        private float attribute;

        public ObjectWithFloat(float attribute) {
            this.attribute = attribute;
        }

        public float getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code double} attribute. */
    public static class ObjectWithDouble implements Serializable {

        private double attribute;

        public ObjectWithDouble(double attribute) {
            this.attribute = attribute;
        }

        public double getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code char} attribute. */
    public static class ObjectWithChar implements Serializable {

        private char attribute;

        public ObjectWithChar(char attribute) {
            this.attribute = attribute;
        }

        public char getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@code boolean} attribute. */
    public static class ObjectWithBoolean implements Serializable {

        private boolean attribute;

        public ObjectWithBoolean(boolean attribute) {
            this.attribute = attribute;
        }

        public boolean getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link BigDecimal} attribute. */
    public static class ObjectWithBigDecimal implements Serializable {

        private BigDecimal attribute;

        public ObjectWithBigDecimal(BigDecimal attribute) {
            this.attribute = attribute;
        }

        public BigDecimal getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link BigInteger} attribute. */
    public static class ObjectWithBigInteger implements Serializable {

        private BigInteger attribute;

        public ObjectWithBigInteger(BigInteger attribute) {
            this.attribute = attribute;
        }

        public BigInteger getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link Timestamp} attribute. */
    public static class ObjectWithSqlTimestamp implements Serializable {

        private Timestamp attribute;

        public ObjectWithSqlTimestamp(Timestamp attribute) {
            this.attribute = attribute;
        }

        public Timestamp getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link java.sql.Date} attribute. */
    public static class ObjectWithSqlDate implements Serializable {

        private java.sql.Date attribute;

        public ObjectWithSqlDate(java.sql.Date attribute) {
            this.attribute = attribute;
        }

        public java.sql.Date getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link Date} attribute. */
    public static class ObjectWithDate implements Serializable {

        private Date attribute;

        public ObjectWithDate(Date attribute) {
            this.attribute = attribute;
        }

        public Date getAttribute() {
            return attribute;
        }
    }

    /** Wrapper with a single {@link UUID} attribute. */
    public static class ObjectWithUUID implements Serializable {

        private UUID attribute;

        public ObjectWithUUID(UUID attribute) {
            this.attribute = attribute;
        }

        public UUID getAttribute() {
            return attribute;
        }
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.gradle.service.project;

import com.intellij.ide.GeneralSettings;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.ide.util.projectWizard.WizardContext;
import com.intellij.ide.wizard.AbstractWizard;
import com.intellij.ide.wizard.CommitStepException;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.externalSystem.ExternalSystemModulePropertyManager;
import com.intellij.openapi.externalSystem.importing.ImportSpec;
import com.intellij.openapi.externalSystem.importing.ImportSpecBuilder;
import com.intellij.openapi.externalSystem.model.ExternalSystemDataKeys;
import com.intellij.openapi.externalSystem.service.execution.ExternalSystemJdkUtil;
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode;
import com.intellij.openapi.externalSystem.service.project.manage.ExternalProjectsManagerImpl;
import com.intellij.openapi.externalSystem.service.project.wizard.AbstractExternalModuleBuilder;
import com.intellij.openapi.externalSystem.service.project.wizard.ExternalModuleSettingsStep;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.module.ModuleTypeManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.projectImport.ProjectOpenProcessor;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.ContainerUtil;
import icons.GradleIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.config.GradleSettingsListenerAdapter;
import org.jetbrains.plugins.gradle.service.settings.GradleProjectSettingsControl;
import org.jetbrains.plugins.gradle.settings.DistributionType;
import org.jetbrains.plugins.gradle.settings.GradleProjectSettings;
import org.jetbrains.plugins.gradle.util.GradleBundle;
import org.jetbrains.plugins.gradle.util.GradleConstants;

import javax.swing.*;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;

/**
 * Opens a directory or build file as a Gradle project: locates a Gradle build script,
 * optionally asks for project/JVM settings through a small wizard, creates or reuses an
 * IDE {@link Project}, links the Gradle settings to it and triggers an import refresh.
 *
 * @author Vladislav.Soroka
 */
public class GradleProjectOpenProcessor extends ProjectOpenProcessor {

  // Recognized Gradle build-script extensions (Groovy and Kotlin DSL).
  @NotNull
  public static final String[] BUILD_FILE_EXTENSIONS =
    {GradleConstants.EXTENSION, GradleConstants.KOTLIN_DSL_SCRIPT_EXTENSION};

  @Override
  public String getName() {
    return GradleBundle.message("gradle.name");
  }

  @Nullable
  @Override
  public Icon getIcon() {
    return GradleIcons.Gradle;
  }

  /**
   * A directory is openable when any direct child is a Gradle build file;
   * a plain file is openable when it is itself a build file.
   */
  @Override
  public boolean canOpenProject(VirtualFile file) {
    if (file.isDirectory()) {
      return Arrays.stream(file.getChildren()).anyMatch(GradleProjectOpenProcessor::canOpenFile);
    }
    else {
      return canOpenFile(file);
    }
  }

  /** True for a non-directory whose name ends with one of {@link #BUILD_FILE_EXTENSIONS}. */
  public static boolean canOpenFile(VirtualFile file) {
    return !file.isDirectory() && Arrays.stream(BUILD_FILE_EXTENSIONS).anyMatch(file.getName()::endsWith);
  }

  @Nullable
  @Override
  public Project doOpenProject(@NotNull VirtualFile virtualFile, @Nullable Project projectToClose, boolean forceOpenInNewFrame) {
    // In a new frame there is no project to close/replace.
    projectToClose = forceOpenInNewFrame ? null : projectToClose;
    Path path = Paths.get(virtualFile.getPath());
    return openGradleProject(null, projectToClose, path);
  }

  /**
   * Core open flow. Resolves {@code path} to a build file (descending one level into a
   * directory if needed), prepares default Gradle settings, shows a settings wizard only
   * when no usable JVM could be auto-detected, then creates/links the project and saves it.
   *
   * @param projectToOpen  existing project to attach to, or {@code null} to create a new one
   * @param projectToClose project that may be closed when the user opens in the same window
   * @return the opened project, or {@code null} if the user cancelled or creation failed
   */
  @Nullable
  public static Project openGradleProject(@Nullable Project projectToOpen,
                                          @Nullable Project projectToClose,
                                          @NotNull Path path) {
    GradleProjectOpenProcessor gradleProjectOpenProcessor =
      Extensions.findExtension(ProjectOpenProcessor.EXTENSION_POINT_NAME, GradleProjectOpenProcessor.class);
    VirtualFile virtualFile = VfsUtil.findFile(path, false);
    if (virtualFile != null && virtualFile.isDirectory()) {
      // Descend to the first child this processor can open (a build file).
      for (VirtualFile file : virtualFile.getChildren()) {
        if (gradleProjectOpenProcessor.canOpenProject(file)) {
          virtualFile = file;
          break;
        }
      }
    }
    // If a build file was found, open its containing directory; otherwise fall back to the raw path.
    String pathToOpen = virtualFile != null ? virtualFile.getParent().getPath() : path.toString();
    final WizardContext wizardContext = new WizardContext(null, null);
    wizardContext.setProjectFileDirectory(pathToOpen);
    GradleProjectSettings gradleProjectSettings = createDefaultProjectSettings();
    gradleProjectSettings.setExternalProjectPath(pathToOpen);
    boolean jvmFound = setupGradleJvm(ObjectUtils.chooseNotNull(projectToOpen, projectToClose), gradleProjectSettings);
    GradleAbstractWizard wizard = new GradleAbstractWizard(wizardContext, gradleProjectSettings);
    AbstractExternalModuleBuilder<GradleProjectSettings> wizardBuilder = wizard.getBuilder();
    try {
      // The wizard is only shown when a Gradle JVM could not be picked automatically.
      if (!jvmFound) {
        wizard.show();
      }
      if (jvmFound || DialogWrapper.OK_EXIT_CODE == wizard.getExitCode()) {
        if (projectToOpen == null) {
          projectToOpen = ProjectManagerEx.getInstanceEx().newProject(wizardContext.getProjectName(), pathToOpen, true, false);
        }
        if (projectToOpen == null) return null;
        ExternalProjectsManagerImpl.getInstance(projectToOpen).setStoreExternally(true);
        VirtualFile finalVirtualFile = virtualFile;
        Project finalProjectToOpen = projectToOpen;
        // Once the Gradle settings are linked, build a preview of the project structure.
        ExternalSystemApiUtil.subscribe(projectToOpen, GradleConstants.SYSTEM_ID, new GradleSettingsListenerAdapter() {
          @Override
          public void onProjectsLinked(@NotNull Collection<GradleProjectSettings> settings) {
            createProjectPreview(finalProjectToOpen, pathToOpen, finalVirtualFile);
          }
        });
        wizardBuilder.commit(projectToOpen, null, ModulesProvider.EMPTY_MODULES_PROVIDER);
        projectToOpen.save();
        if (projectToClose != null) {
          closePreviousProject(projectToClose);
        }
        projectToOpen.putUserData(ExternalSystemDataKeys.NEWLY_IMPORTED_PROJECT, Boolean.TRUE);
        if (!projectToOpen.isOpen()) {
          ProjectManagerEx.getInstanceEx().openProject(projectToOpen);
        }
        return projectToOpen;
      }
    }
    finally {
      // Always release wizard/builder resources, even when the user cancels.
      wizardBuilder.cleanup();
      Disposer.dispose(wizard.getDisposable());
    }
    return null;
  }

  /** Attaches the Gradle project at {@code gradleProjectPath} to an already-open {@code project}. */
  public static void attachGradleProjectAndRefresh(@NotNull Project project, @NotNull String gradleProjectPath) {
    openGradleProject(project, null, Paths.get(gradleProjectPath));
  }

  /** Defaults: wrapper distribution, externally-stored project files, qualified module names. */
  @NotNull
  private static GradleProjectSettings createDefaultProjectSettings() {
    GradleProjectSettings settings = new GradleProjectSettings();
    settings.setDistributionType(DistributionType.DEFAULT_WRAPPED);
    settings.setStoreProjectFilesExternally(ThreeState.YES);
    settings.setUseQualifiedModuleNames(true);
    return settings;
  }

  /**
   * Asks the user how to open the new project; closes {@code projectToClose} (or the last
   * open project when it is null) only when "same window" is chosen.
   */
  private static void closePreviousProject(final Project projectToClose) {
    Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
    if (openProjects.length > 0) {
      int exitCode = ProjectUtil.confirmOpenNewProject(true);
      if (exitCode == GeneralSettings.OPEN_PROJECT_SAME_WINDOW) {
        ProjectUtil.closeAndDispose(projectToClose != null ? projectToClose : openProjects[openProjects.length - 1]);
      }
    }
  }

  /**
   * One-step wizard presenting {@link ExternalModuleSettingsStep} for Gradle settings.
   * Owns the module builder that performs the actual linking on commit.
   */
  private static class GradleAbstractWizard extends AbstractWizard<ExternalModuleSettingsStep> {
    private final AbstractExternalModuleBuilder<GradleProjectSettings> myBuilder;

    GradleAbstractWizard(WizardContext wizardContext, GradleProjectSettings gradleProjectSettings) {
      super("Open Gradle Project", (Project)null);
      myBuilder = new AbstractExternalModuleBuilder<GradleProjectSettings>(GradleConstants.SYSTEM_ID, gradleProjectSettings) {
        @Override
        protected void setupModule(Module module) throws ConfigurationException {
          super.setupModule(module);
          // it will be set later in any case, but save is called immediately after project creation, so, to ensure that it will be properly saved as external system module
          ExternalSystemModulePropertyManager.getInstance(module).setExternalId(GradleConstants.SYSTEM_ID);
          final Project project = module.getProject();
          FileDocumentManager.getInstance().saveAllDocuments();
          final GradleProjectSettings gradleProjectSettings = getExternalProjectSettings();
          attachGradleProjectAndRefresh(project, gradleProjectSettings);
        }

        @Override
        public void setupRootModel(ModifiableRootModel modifiableRootModel) {
          String contentEntryPath = getContentEntryPath();
          if (StringUtil.isEmpty(contentEntryPath)) {
            return;
          }
          // Ensure the content root exists on disk before registering it with the model.
          File contentRootDir = new File(contentEntryPath);
          FileUtilRt.createDirectory(contentRootDir);
          LocalFileSystem fileSystem = LocalFileSystem.getInstance();
          VirtualFile modelContentRootDir = fileSystem.refreshAndFindFileByIoFile(contentRootDir);
          if (modelContentRootDir == null) {
            return;
          }
          modifiableRootModel.addContentEntry(modelContentRootDir);
        }

        @Override
        public ModuleType getModuleType() {
          return ModuleTypeManager.getInstance().getDefaultModuleType();
        }
      };
      GradleProjectSettingsControl settingsControl = new GradleProjectSettingsControl(myBuilder.getExternalProjectSettings());
      ExternalModuleSettingsStep<GradleProjectSettings> step =
        new ExternalModuleSettingsStep<GradleProjectSettings>(wizardContext, myBuilder, settingsControl) {
          @Override
          public void _commit(boolean finishChosen) throws CommitStepException {
            try {
              validate();
              updateDataModel();
            }
            catch (ConfigurationException e) {
              throw new CommitStepException(e.getMessage());
            }
          }
        };
      addStep(step);
      init();
    }

    @Nullable
    @Override
    protected String getHelpID() {
      return null;
    }

    public AbstractExternalModuleBuilder<GradleProjectSettings> getBuilder() {
      return myBuilder;
    }
  }

  /**
   * Links {@code gradleProjectSettings} to {@code project} and kicks off a background
   * import refresh; also makes sure the Gradle tool window is initialized once indexing
   * is done. The link+refresh runnable is deferred to the non-modal modality state.
   */
  private static void attachGradleProjectAndRefresh(@NotNull Project project, @NotNull GradleProjectSettings gradleProjectSettings) {
    Runnable runnable = () -> {
      AbstractExternalSystemSettings settings = ExternalSystemApiUtil.getSettings(project, GradleConstants.SYSTEM_ID);
      //noinspection unchecked
      settings.linkProject(gradleProjectSettings);
      ImportSpec importSpec = new ImportSpecBuilder(project, GradleConstants.SYSTEM_ID)
        .use(ProgressExecutionMode.IN_BACKGROUND_ASYNC)
        .useDefaultCallback()
        .build();
      ExternalSystemUtil.refreshProject(gradleProjectSettings.getExternalProjectPath(), importSpec);
    };
    ExternalProjectsManagerImpl.getInstance(project)
      .runWhenInitialized(
        () -> DumbService.getInstance(project).runWhenSmart(
          () -> ExternalSystemUtil.ensureToolWindowInitialized(project, GradleConstants.SYSTEM_ID)));
    // execute when current dialog(if any) is closed
    ExternalSystemUtil.invokeLater(project, ModalityState.NON_MODAL, runnable);
  }

  /**
   * Picks a Gradle JVM for {@code projectSettings}.
   * Order: the available JDK pair (unless it is the internal JVM with an invalid home),
   * then the last suggested JDK home path.
   *
   * @return {@code true} if a JVM was configured, {@code false} if the wizard must ask the user
   */
  private static boolean setupGradleJvm(@Nullable Project project, @NotNull GradleProjectSettings projectSettings) {
    final Pair<String, Sdk> sdkPair = ExternalSystemJdkUtil.getAvailableJdk(project);
    // NOTE(review): sdkPair.second.getHomePath() assumes `second` is non-null whenever
    // `first` equals USE_INTERNAL_JAVA — confirm against getAvailableJdk's contract.
    if (!ExternalSystemJdkUtil.USE_INTERNAL_JAVA.equals(sdkPair.first) || ExternalSystemJdkUtil.isValidJdk(sdkPair.second.getHomePath())) {
      projectSettings.setGradleJvm(sdkPair.first);
      return true;
    }
    String jdkPath = ContainerUtil.iterateAndGetLastItem(ExternalSystemJdkUtil.suggestJdkHomePaths());
    if (jdkPath != null) {
      Sdk sdk = ExternalSystemJdkUtil.addJdk(jdkPath);
      projectSettings.setGradleJvm(sdk.getName());
      return true;
    }
    return false;
  }

  /**
   * Runs a modal, synchronous preview refresh of the project structure and, once smart
   * mode is reached, initializes the tool window and selects the build file in the
   * project view (when one is known).
   */
  private static void createProjectPreview(@NotNull Project project, @NotNull String rootProjectPath, @Nullable VirtualFile virtualFile) {
    ExternalSystemUtil.refreshProject(project, GradleConstants.SYSTEM_ID, rootProjectPath, true, ProgressExecutionMode.MODAL_SYNC);
    ExternalProjectsManagerImpl.getInstance(project).runWhenInitialized(() -> DumbService.getInstance(project).runWhenSmart(() -> {
      ExternalSystemUtil.ensureToolWindowInitialized(project, GradleConstants.SYSTEM_ID);
      if (virtualFile == null) return;
      final PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
      if (psiFile != null) {
        ProjectView.getInstance(project).selectPsiElement(psiFile, false);
      }
    }));
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator.scalar;

import com.facebook.presto.annotation.UsedByGeneratedCode;
import com.facebook.presto.common.NotSupportedException;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.block.SingleMapBlock;
import com.facebook.presto.common.function.OperatorType;
import com.facebook.presto.common.function.QualifiedFunctionName;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.metadata.BoundVariables;
import com.facebook.presto.metadata.FunctionAndTypeManager;
import com.facebook.presto.metadata.SqlScalarFunction;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.function.FunctionKind;
import com.facebook.presto.spi.function.Signature;
import com.facebook.presto.spi.function.SqlFunctionVisibility;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Primitives;
import io.airlift.slice.Slice;

import java.lang.invoke.MethodHandle;

import static com.facebook.presto.common.block.MethodHandleUtil.compose;
import static com.facebook.presto.common.block.MethodHandleUtil.nativeValueGetter;
import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.common.type.TypeUtils.readNativeValue;
import static com.facebook.presto.metadata.BuiltInFunctionNamespaceManager.DEFAULT_NAMESPACE;
import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.ArgumentProperty.valueTypeArgumentProperty;
import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.NullConvention.RETURN_NULL_ON_NULL;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.function.Signature.typeVariable;
import static com.facebook.presto.spi.function.SqlFunctionVisibility.PUBLIC;
import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes;
import static com.facebook.presto.util.Reflection.methodHandle;

/**
 * SQL scalar function {@code element_at(map(K,V), K) -> V}: returns the value mapped to
 * the given key, or SQL NULL when the key is absent. One {@code elementAt} overload exists
 * per possible native Java representation of the key (boolean/long/double/Slice/Object);
 * {@link #specialize} selects and binds the matching one at query-planning time.
 */
public class MapElementAtFunction
        extends SqlScalarFunction
{
    public static final MapElementAtFunction MAP_ELEMENT_AT = new MapElementAtFunction();

    // One pre-resolved handle per native key representation; the first four parameters
    // (hash/equals handles and the value type) are bound in specialize().
    private static final MethodHandle METHOD_HANDLE_BOOLEAN = methodHandle(MapElementAtFunction.class, "elementAt", MethodHandle.class, MethodHandle.class, MethodHandle.class, Type.class, Block.class, boolean.class);
    private static final MethodHandle METHOD_HANDLE_LONG = methodHandle(MapElementAtFunction.class, "elementAt", MethodHandle.class, MethodHandle.class, MethodHandle.class, Type.class, Block.class, long.class);
    private static final MethodHandle METHOD_HANDLE_DOUBLE = methodHandle(MapElementAtFunction.class, "elementAt", MethodHandle.class, MethodHandle.class, MethodHandle.class, Type.class, Block.class, double.class);
    private static final MethodHandle METHOD_HANDLE_SLICE = methodHandle(MapElementAtFunction.class, "elementAt", MethodHandle.class, MethodHandle.class, MethodHandle.class, Type.class, Block.class, Slice.class);
    private static final MethodHandle METHOD_HANDLE_OBJECT = methodHandle(MapElementAtFunction.class, "elementAt", MethodHandle.class, MethodHandle.class, MethodHandle.class, Type.class, Block.class, Object.class);

    protected MapElementAtFunction()
    {
        // Signature: element_at(map(K,V), K) -> V with free type variables K and V.
        super(new Signature(
                QualifiedFunctionName.of(DEFAULT_NAMESPACE, "element_at"),
                FunctionKind.SCALAR,
                ImmutableList.of(typeVariable("K"), typeVariable("V")),
                ImmutableList.of(),
                parseTypeSignature("V"),
                ImmutableList.of(parseTypeSignature("map(K,V)"), parseTypeSignature("K")),
                false));
    }

    @Override
    public SqlFunctionVisibility getVisibility()
    {
        return PUBLIC;
    }

    @Override
    public boolean isDeterministic()
    {
        return true;
    }

    @Override
    public String getDescription()
    {
        return "Get value for the given key, or null if it does not exist";
    }

    /**
     * Binds the concrete K and V types: resolves HASH_CODE and EQUAL operators for the
     * key type, composes block-level variants of each, picks the {@code elementAt}
     * overload matching the key's native Java type, then partially applies the four
     * leading parameters. The bindTo order must match the overloads' parameter order:
     * (keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode, valueType).
     */
    @Override
    public BuiltInScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, FunctionAndTypeManager functionAndTypeManager)
    {
        Type keyType = boundVariables.getTypeVariable("K");
        Type valueType = boundVariables.getTypeVariable("V");
        MethodHandle keyNativeHashCode = functionAndTypeManager.getBuiltInScalarFunctionImplementation(functionAndTypeManager.resolveOperator(OperatorType.HASH_CODE, fromTypes(keyType))).getMethodHandle();
        // compose(...) adapts the native-value handle to read its argument from a Block position.
        MethodHandle keyBlockHashCode = compose(keyNativeHashCode, nativeValueGetter(keyType));
        MethodHandle keyNativeEquals = functionAndTypeManager.getBuiltInScalarFunctionImplementation(functionAndTypeManager.resolveOperator(OperatorType.EQUAL, fromTypes(keyType, keyType))).getMethodHandle();
        MethodHandle keyBlockNativeEquals = compose(keyNativeEquals, nativeValueGetter(keyType));
        MethodHandle methodHandle;
        if (keyType.getJavaType() == boolean.class) {
            methodHandle = METHOD_HANDLE_BOOLEAN;
        }
        else if (keyType.getJavaType() == long.class) {
            methodHandle = METHOD_HANDLE_LONG;
        }
        else if (keyType.getJavaType() == double.class) {
            methodHandle = METHOD_HANDLE_DOUBLE;
        }
        else if (keyType.getJavaType() == Slice.class) {
            methodHandle = METHOD_HANDLE_SLICE;
        }
        else {
            // Structural/other key types fall back to the Object overload (key passed as Block).
            methodHandle = METHOD_HANDLE_OBJECT;
        }
        methodHandle = methodHandle.bindTo(keyNativeHashCode).bindTo(keyBlockNativeEquals).bindTo(keyBlockHashCode).bindTo(valueType);
        // Box the return so a missing key can be represented as Java null.
        methodHandle = methodHandle.asType(methodHandle.type().changeReturnType(Primitives.wrap(valueType.getJavaType())));
        return new BuiltInScalarFunctionImplementation(
                true,
                ImmutableList.of(
                        valueTypeArgumentProperty(RETURN_NULL_ON_NULL),
                        valueTypeArgumentProperty(RETURN_NULL_ON_NULL)),
                methodHandle);
    }

    /** boolean-key variant: returns the mapped value or null when the key is not present. */
    @UsedByGeneratedCode
    public static Object elementAt(MethodHandle keyNativeHashCode, MethodHandle keyBlockNativeEquals, MethodHandle keyBlockHashCode, Type valueType, Block map, boolean key)
    {
        SingleMapBlock mapBlock = (SingleMapBlock) map;
        int valuePosition;
        try {
            valuePosition = mapBlock.seekKeyExact(key, keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode);
        }
        catch (NotSupportedException e) {
            // Surface unsupported key-type comparisons as a proper Presto error, keeping the cause.
            throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e);
        }
        if (valuePosition == -1) {
            return null;
        }
        return readNativeValue(valueType, mapBlock, valuePosition);
    }

    /** long-key variant: returns the mapped value or null when the key is not present. */
    @UsedByGeneratedCode
    public static Object elementAt(MethodHandle keyNativeHashCode, MethodHandle keyBlockNativeEquals, MethodHandle keyBlockHashCode, Type valueType, Block map, long key)
    {
        SingleMapBlock mapBlock = (SingleMapBlock) map;
        int valuePosition;
        try {
            valuePosition = mapBlock.seekKeyExact(key, keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode);
        }
        catch (NotSupportedException e) {
            throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e);
        }
        if (valuePosition == -1) {
            return null;
        }
        return readNativeValue(valueType, mapBlock, valuePosition);
    }

    /** double-key variant: returns the mapped value or null when the key is not present. */
    @UsedByGeneratedCode
    public static Object elementAt(MethodHandle keyNativeHashCode, MethodHandle keyBlockNativeEquals, MethodHandle keyBlockHashCode, Type valueType, Block map, double key)
    {
        SingleMapBlock mapBlock = (SingleMapBlock) map;
        int valuePosition;
        try {
            valuePosition = mapBlock.seekKeyExact(key, keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode);
        }
        catch (NotSupportedException e) {
            throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e);
        }
        if (valuePosition == -1) {
            return null;
        }
        return readNativeValue(valueType, mapBlock, valuePosition);
    }

    /** Slice-key (varchar/varbinary-like) variant: returns the mapped value or null. */
    @UsedByGeneratedCode
    public static Object elementAt(MethodHandle keyNativeHashCode, MethodHandle keyBlockNativeEquals, MethodHandle keyBlockHashCode, Type valueType, Block map, Slice key)
    {
        SingleMapBlock mapBlock = (SingleMapBlock) map;
        int valuePosition;
        try {
            valuePosition = mapBlock.seekKeyExact(key, keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode);
        }
        catch (NotSupportedException e) {
            throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e);
        }
        if (valuePosition == -1) {
            return null;
        }
        return readNativeValue(valueType, mapBlock, valuePosition);
    }

    /** Object-key variant; the key is expected to be a {@link Block} (structural types). */
    @UsedByGeneratedCode
    public static Object elementAt(MethodHandle keyNativeHashCode, MethodHandle keyBlockNativeEquals, MethodHandle keyBlockHashCode, Type valueType, Block map, Object key)
    {
        SingleMapBlock mapBlock = (SingleMapBlock) map;
        int valuePosition;
        try {
            valuePosition = mapBlock.seekKeyExact((Block) key, keyNativeHashCode, keyBlockNativeEquals, keyBlockHashCode);
        }
        catch (NotSupportedException e) {
            throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e);
        }
        if (valuePosition == -1) {
            return null;
        }
        return readNativeValue(valueType, mapBlock, valuePosition);
    }
}
package com.dianping.polestar.lang;

import java.io.InputStream;
import java.lang.reflect.Constructor;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Reflection helpers for loading classes and resources and for instantiating
 * objects. Every lookup cascades through three class loaders in order:
 * the calling thread's context class loader, the class loader that loaded
 * this class, and finally the system/application class loader.
 */
public class ClassUtils {

  private static final Log LOG = LogFactory.getLog(ClassUtils.class);

  /** Resolves against the current thread's context class loader. */
  private static final ClassLoaderAccessor THREAD_CL_ACCESSOR = new ExceptionIgnoringAccessor() {
    @Override
    protected ClassLoader doGetClassLoader() throws Throwable {
      return Thread.currentThread().getContextClassLoader();
    }
  };

  /** Resolves against the class loader that loaded ClassUtils itself. */
  private static final ClassLoaderAccessor CLASS_CL_ACCESSOR = new ExceptionIgnoringAccessor() {
    @Override
    protected ClassLoader doGetClassLoader() throws Throwable {
      return ClassUtils.class.getClassLoader();
    }
  };

  /** Resolves against the system/application class loader. */
  private static final ClassLoaderAccessor SYSTEM_CL_ACCESSOR = new ExceptionIgnoringAccessor() {
    @Override
    protected ClassLoader doGetClassLoader() throws Throwable {
      return ClassLoader.getSystemClassLoader();
    }
  };

  /**
   * Opens the named resource, trying the thread-context, current, and
   * system class loaders in that order.
   *
   * @param name resource path, as understood by {@link ClassLoader#getResourceAsStream(String)}
   * @return an open stream for the resource, or {@code null} if no loader found it
   */
  public static InputStream getResourceAsStream(String name) {
    InputStream is = THREAD_CL_ACCESSOR.getResourceStream(name);

    if (is == null) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Resource [" + name
            + "] was not found via the thread context ClassLoader. Trying the "
            + "current ClassLoader...");
      }
      is = CLASS_CL_ACCESSOR.getResourceStream(name);
    }

    if (is == null) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Resource [" + name
            + "] was not found via the current class loader. Trying the "
            + "system/application ClassLoader...");
      }
      is = SYSTEM_CL_ACCESSOR.getResourceStream(name);
    }

    if (is == null && LOG.isTraceEnabled()) {
      LOG.trace("Resource [" + name
          + "] was not found via the thread context, current, or "
          + "system/application ClassLoaders. All heuristics have been exhausted. Returning null.");
    }

    return is;
  }

  /**
   * Loads the class with the given fully qualified name, trying the
   * thread-context, current, and system class loaders in that order.
   *
   * @param fqcn fully qualified class name to load
   * @return the loaded class; never {@code null}
   * @throws RuntimeException if none of the three class loaders can find the class
   */
  public static Class<?> forName(String fqcn) throws RuntimeException {
    Class<?> clazz = THREAD_CL_ACCESSOR.loadClass(fqcn);

    if (clazz == null) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Unable to load class named [" + fqcn
            + "] from the thread context ClassLoader. Trying the current ClassLoader...");
      }
      clazz = CLASS_CL_ACCESSOR.loadClass(fqcn);
    }

    if (clazz == null) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Unable to load class named [" + fqcn + "] from the current ClassLoader. "
            + "Trying the system/application ClassLoader...");
      }
      clazz = SYSTEM_CL_ACCESSOR.loadClass(fqcn);
    }

    if (clazz == null) {
      String msg = "Unable to load class named [" + fqcn
          + "] from the thread context, current, or "
          + "system/application ClassLoaders. All heuristics have been exhausted. Class could not be found.";
      throw new RuntimeException(msg);
    }

    return clazz;
  }

  /**
   * Checks whether a class with the given name can be loaded by any of the
   * cascaded class loaders.
   *
   * @param fullyQualifiedClassName class name to probe
   * @return {@code true} if the class is loadable, {@code false} otherwise
   */
  public static boolean isAvailable(String fullyQualifiedClassName) {
    try {
      forName(fullyQualifiedClassName);
      return true;
    } catch (RuntimeException e) {
      // forName throws RuntimeException when the class is not found; that is
      // exactly the "not available" answer, so the exception is intentionally
      // translated to false rather than propagated.
      return false;
    }
  }

  /**
   * Loads the named class and instantiates it via its no-arg constructor.
   *
   * @param fqcn fully qualified class name
   * @return a new instance of the class
   */
  public static Object newInstance(String fqcn) {
    return newInstance(forName(fqcn));
  }

  /**
   * Loads the named class and instantiates it via a constructor matching the
   * runtime types of {@code args}.
   *
   * @param fqcn fully qualified class name
   * @param args constructor arguments; see {@link #newInstance(Class, Object...)}
   * @return a new instance of the class
   */
  public static Object newInstance(String fqcn, Object... args) {
    return newInstance(forName(fqcn), args);
  }

  /**
   * Instantiates the given class via its public no-arg constructor.
   *
   * @param clazz class to instantiate; must not be {@code null}
   * @return a new instance
   * @throws IllegalArgumentException if {@code clazz} is {@code null}
   * @throws RuntimeException if instantiation fails, with the original cause attached
   */
  public static Object newInstance(Class<?> clazz) {
    if (clazz == null) {
      String msg = "Class method parameter cannot be null.";
      throw new IllegalArgumentException(msg);
    }
    try {
      return clazz.newInstance();
    } catch (Exception e) {
      throw new RuntimeException("Unable to instantiate class [" + clazz.getName() + "]", e);
    }
  }

  /**
   * Instantiates the given class via a public constructor whose parameter
   * types exactly match the runtime classes of {@code args}.
   *
   * NOTE(review): a null element in {@code args} will NPE here, and only
   * exact-type matches are found (a constructor taking a supertype or a
   * primitive will not be located) -- acceptable for current callers.
   *
   * @param clazz class to instantiate
   * @param args  constructor arguments
   * @return a new instance
   */
  @SuppressWarnings("rawtypes")
  public static Object newInstance(Class<?> clazz, Object... args) {
    Class[] argTypes = new Class[args.length];
    for (int i = 0; i < args.length; i++) {
      argTypes[i] = args[i].getClass();
    }
    Constructor<?> ctor = getConstructor(clazz, argTypes);
    return instantiate(ctor, args);
  }

  /**
   * Looks up a public constructor with the given parameter types.
   *
   * @param clazz    class whose constructor is wanted
   * @param argTypes exact constructor parameter types
   * @return the matching constructor
   * @throws IllegalStateException if no such public constructor exists
   */
  public static Constructor<?> getConstructor(Class<?> clazz, Class<?>... argTypes) {
    try {
      return clazz.getConstructor(argTypes);
    } catch (NoSuchMethodException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Invokes the given constructor with the supplied arguments.
   *
   * @param ctor constructor to invoke
   * @param args arguments to pass
   * @return the new instance
   * @throws RuntimeException wrapping any reflective failure
   */
  public static Object instantiate(Constructor<?> ctor, Object... args) {
    try {
      return ctor.newInstance(args);
    } catch (Exception e) {
      // Fixed message: previously said "Permission instance" -- a copy-paste
      // from another codebase; there is no Permission type here.
      String msg = "Unable to instantiate instance with constructor [" + ctor + "]";
      throw new RuntimeException(msg, e);
    }
  }

  /** Strategy interface: one class-loader source for class/resource lookups. */
  private static interface ClassLoaderAccessor {
    Class<?> loadClass(String fqcn);

    InputStream getResourceStream(String name);
  }

  /**
   * Base accessor that treats every failure (including failure to obtain the
   * class loader itself) as "not found", returning null instead of throwing.
   */
  private static abstract class ExceptionIgnoringAccessor implements ClassLoaderAccessor {

    public Class<?> loadClass(String fqcn) {
      Class<?> clazz = null;
      ClassLoader cl = getClassLoader();
      if (cl != null) {
        try {
          clazz = cl.loadClass(fqcn);
        } catch (ClassNotFoundException e) {
          // Deliberately swallowed: caller interprets null as "try next loader".
          if (LOG.isTraceEnabled()) {
            LOG.trace("Unable to load clazz named [" + fqcn + "] from class loader [" + cl + "]");
          }
        }
      }
      return clazz;
    }

    public InputStream getResourceStream(String name) {
      InputStream is = null;
      ClassLoader cl = getClassLoader();
      if (cl != null) {
        is = cl.getResourceAsStream(name);
      }
      return is;
    }

    protected final ClassLoader getClassLoader() {
      try {
        return doGetClassLoader();
      } catch (Throwable t) {
        // Deliberately swallowed: a loader we cannot acquire is simply skipped.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Unable to acquire ClassLoader.", t);
        }
      }
      return null;
    }

    /** Supplies the class loader this accessor resolves against. */
    protected abstract ClassLoader doGetClassLoader() throws Throwable;
  }
}
/*
 * Copyright 2017 Eric Bishton
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.plugins.haxe.haxelib;

import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.plugins.haxe.lang.psi.HaxeClass;
import com.intellij.plugins.haxe.util.HaxeDebugLogger;
import com.intellij.plugins.haxe.util.HaxeFileUtil;
import org.apache.commons.lang.NotImplementedException;
import org.apache.log4j.Level;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.security.InvalidParameterException;
import java.util.*;

/**
 * In-memory model of a single haxelib-managed library: its name, version,
 * on-disk root, relative classpath, and metadata-declared dependencies.
 * Instances are created via {@link #load} and cached by HaxelibLibraryCache.
 */
public class HaxeLibrary {

  private static HaxeDebugLogger LOG = HaxeDebugLogger.getLogger();
  static {LOG.setLevel(Level.DEBUG);}  // Remove when finished debugging.

  // Sentinel for "classpath is the library root itself"; compared by identity
  // (==) in getSourceRoot, which works because the constructor assigns this
  // exact reference when no classpath is given.
  private static String CURRENT_DIR = ".";

  final private HaxelibLibraryCache myCache;
  final private String myName;
  final private String myLibraryRoot;        // stored as a VirtualFile URL
  final private String myRelativeClasspath;  // relative to myLibraryRoot, or CURRENT_DIR
  final private HaxelibMetadata myMetadata;
  final private HaxelibSemVer mySemVer;
  // TODO: Add the extraParams.hxml data here. Use the hxml parser; see LimeUtil.getLimeProjectModel() as an example.

  /**
   * Builds the library model from its on-disk root, preferring values from
   * haxelib.json metadata and falling back to path-derived info, then to the
   * supplied name. Private: use {@link #load}.
   */
  private HaxeLibrary(@NotNull String name, @NotNull VirtualFile libraryRoot, @NotNull HaxelibLibraryCache owner) {
    myCache = owner;
    myLibraryRoot = libraryRoot.getUrl();

    myMetadata = HaxelibMetadata.load(libraryRoot);
    HaxeLibraryInfo pathInfo = HaxelibUtil.deriveLibraryInfoFromPath(owner.getSdk(), libraryRoot.getPath());

    // Name priority: metadata > caller-supplied > derived-from-path > "".
    String mdname = myMetadata.getName();
    if (null != mdname && !mdname.isEmpty()) {
      myName = mdname;
    } else if (!name.isEmpty()) {
      myName = name;
    } else {
      myName = pathInfo == null ? "" : pathInfo.getName();
    }

    // Version priority: metadata (unless the ZERO placeholder) > path-derived.
    // NOTE(review): identity compare against ZERO_VERSION assumes a shared
    // singleton constant -- confirm in HaxelibSemVer.
    HaxelibSemVer semVer = HaxelibSemVer.create(myMetadata.getVersion());
    if (HaxelibSemVer.ZERO_VERSION == semVer && pathInfo != null) {
      semVer = pathInfo.getVersion();
    }
    mySemVer = semVer;

    // Classpath priority: metadata > path-derived > current dir sentinel.
    String cp = myMetadata.getClasspath();
    if ((null == cp || cp.isEmpty()) && pathInfo != null) {
      cp = pathInfo.getClasspath();
    }
    if (null != cp && !cp.isEmpty()) {
      myRelativeClasspath = cp;
    } else {
      myRelativeClasspath = CURRENT_DIR;
    }
  }

  /**
   * Get the list of libraries that this library depends upon (but not /their/ dependents).
   *
   * Private because it's only used by collectDependents.
   * Returns a List instead of a HaxeLibraryList to keep synchronized access down.
   */
  @NotNull
  private List<HaxeLibraryDependency> getDirectDependents() {
    List<HaxelibMetadata.Dependency> mdDependencies = myMetadata.getDependencies();
    if (null == mdDependencies || mdDependencies.isEmpty()) {
      return Collections.emptyList();
    }
    List<HaxeLibraryDependency> dependencies = new ArrayList<HaxeLibraryDependency>(mdDependencies.size());
    for (HaxelibMetadata.Dependency md : mdDependencies) {
      HaxeLibraryDependency newdep = new HaxeLibraryDependency(myCache, md.getName(), md.getVersion(), this);
      dependencies.add(newdep);
    }
    return dependencies;
  }

  /**
   * Get all dependent libraries in search order.
   */
  @NotNull
  public HaxeLibraryList collectDependents() {
    // LinkedHashMap preserves first-seen (search) order while de-duplicating by key.
    LinkedHashMap<String, HaxeLibraryDependency> collection = new LinkedHashMap<String, HaxeLibraryDependency>();
    collectDependentsInternal(collection);
    HaxeLibraryList list = new HaxeLibraryList(myCache.getSdk());
    for (HaxeLibraryDependency dep : collection.values()) {
      list.add(dep);
    }
    return list;
  }

  /**
   * Depth-first transitive walk of the dependency graph; the keyed map doubles
   * as the visited set, so cycles and repeats terminate.
   */
  private void collectDependentsInternal(/*modifies*/ final @NotNull LinkedHashMap<String, HaxeLibraryDependency> collection) {
    List<HaxeLibraryDependency> dependencies = getDirectDependents();

    for (HaxeLibraryDependency dependency : dependencies) {
      if (!collection.containsKey(dependency.getKey())) { // Don't go down the same path again...
        // TODO: Deal with version mismatches here.  Add multiple versions, but don't add a specific version if the latest version is equal to it.
        collection.put(dependency.getKey(), dependency);
        HaxeLibrary depLib = dependency.getLibrary();
        if (null != depLib) {
          depLib.collectDependentsInternal(collection);
        } // TODO: Else mark dependency unfulfilled somehow??
      } else {
        // Already collected: just record that this library also relies on it.
        HaxeLibraryDependency contained = collection.get(dependency.getKey());
        LOG.assertLog(contained != null, "Couldn't get a contained object.");
        if (contained != null) {
          contained.addReliant(dependency);
        }
      }
    }
  }

  /**
   * Get the internal name of the library.
   */
  @NotNull
  public String getName() {
    return myName;
  }

  /**
   * Get the display name of the library.
   */
  @NotNull
  public String getPresentableName() {
    // TODO: Figure out what extra decorations we might need, like the version, 'dependency of', etc.
    return getName();
  }

  /**
   * Classpath for this library: currently a single entry, the source root.
   * NOTE(review): annotated @Nullable but always returns a non-null object.
   */
  @Nullable
  public HaxeClasspath getClasspathEntries() {
    HaxeClasspath cp = new HaxeClasspath();
    cp.add(getSourceRoot());
    return cp;
  }

  /**
   * Source root: the library root itself when no relative classpath was
   * declared (identity check against the CURRENT_DIR sentinel), otherwise
   * root joined with the declared relative classpath.
   */
  @NotNull
  public HaxeClasspathEntry getSourceRoot() {
    if (CURRENT_DIR == myRelativeClasspath) {
      return getLibraryRoot();
    }
    return new HaxeClasspathEntry(myName, HaxeFileUtil.joinPath(myLibraryRoot, myRelativeClasspath));
  }

  /** Classpath entry pointing at the library's root URL. */
  @NotNull
  public HaxeClasspathEntry getLibraryRoot() {
    return new HaxeClasspathEntry(myName, myLibraryRoot);
  }

  /** Resolved semantic version of this library. */
  @NotNull
  public HaxelibSemVer getVersion() {
    return mySemVer;
  }

  /**
   * Load a library from disk.  This *DOES NOT* place the library into the library manager.
   *
   * @param libName - name of the library (as haxelib understands it) to load.
   * @return the loaded HaxeLibrary of the given name; null if not found.
   */
  @Nullable
  public static HaxeLibrary load(HaxelibLibraryCache owner, String libName, Sdk sdk) {
    // Ask haxelib for the path to this library.
    VirtualFile libraryRoot = HaxelibUtil.getLibraryRoot(sdk, libName);
    if (null == libraryRoot) {
      // XXX: This case might occur if the library is not managed by haxelib, but then
      //      that should be a classpath, not a lib.
      return null;
    }
    try {
      return new HaxeLibrary(libName, libraryRoot, owner);
    } catch (InvalidParameterException e) {
      ; // libName must not have been an url
    }
    return null;
  }

  /**
   * Create a new reference for this library.
   * @param isManaged whether or not this reference is a "managed reference".
   */
  @NotNull
  public HaxeLibraryReference createReference(boolean isManaged) {
    return new HaxeLibraryReference(myCache, myName, mySemVer, isManaged);
  }

  /**
   * Create a new unmanaged reference for this library.
   */
  @NotNull
  public HaxeLibraryReference createReference() {
    return new HaxeLibraryReference(myCache, myName, mySemVer);
  }

  /** Create an unmanaged reference for this library with an overridden version. */
  @NotNull
  public HaxeLibraryReference createReference(HaxelibSemVer override) {
    return new HaxeLibraryReference(myCache, myName, override);
  }

  /**
   * Test whether this library is effectively the same as a Library appearing
   * in IDEA's library tables.
   *
   * @param lib - Library to test.
   * @return true if this library uses the same sources as the IDEA library; false otherwise.
   */
  public boolean matchesIdeaLib(Library lib) {
    if (null == lib) {
      return false;
    }

    // Equality means: every IDEA source root is in our classpath, and the
    // counts match (i.e., set equality, since containsUrl checks membership).
    HaxeClasspath cp = getClasspathEntries();
    VirtualFile[] sources = lib.getFiles(OrderRootType.SOURCES);
    for (VirtualFile file : sources) {
      if (!cp.containsUrl(file.getUrl())) {
        return false;
      }
    }
    return cp.size() == sources.length;
  }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */

package com.microsoft.azure.management.search.samples;

import com.microsoft.azure.management.Azure;
import com.microsoft.azure.management.resources.fluentcore.arm.Region;
import com.microsoft.azure.management.resources.fluentcore.utils.SdkContext;
import com.microsoft.azure.management.samples.Utils;
import com.microsoft.azure.management.search.AdminKeyKind;
import com.microsoft.azure.management.search.SearchService;
import com.microsoft.azure.management.search.SkuName;
import com.microsoft.rest.LogLevel;

import java.io.File;
import java.util.Date;
import java.util.List;

/**
 * Azure Search sample for managing search service.
 *  - Create a Search service resource with a free SKU
 *  - Create a Search service resource with a standard SKU, one replica and one partition
 *  - Create a new query key and delete a query key
 *  - Update the Search service with two replicas and two partitions
 *    (NOTE(review): this text previously said "three" while the code applies 2/2; confirm intended counts)
 *  - Regenerate the primary and secondary admin keys
 *  - Delete the Search service
 */
public class ManageSearchService {

    /**
     * Main function which runs the actual sample.
     *
     * @param azure instance of the azure client
     * @return true if sample runs successfully
     */
    public static boolean runSample(Azure azure) {
        final String rgName = SdkContext.randomResourceName("rgSearch", 15);
        final String searchServiceName = SdkContext.randomResourceName("ssrv", 20);
        final Region region = Region.US_EAST;

        try {
            //=============================================================
            // Check if the name for the Azure Search service to be created is available

            if (!azure.searchServices().checkNameAvailability(searchServiceName).isAvailable()) {
                return false;
            }

            Date t1, t2;

            // Azure limits the number of free Search service resources to one per subscription
            // List all Search services in the subscription and skip if there is already one resource of type free SKU
            boolean createFreeService = true;
            List<SearchService> resources = azure.searchServices().list();
            for (SearchService item : resources) {
                if (item.sku().name() == SkuName.FREE) {
                    createFreeService = false;
                    break;
                }
            }

            if (createFreeService) {
                //=============================================================
                // Create an Azure Search service resource with a "free" SKU

                System.out.println("Creating an Azure Search service using \"free\" SKU");

                t1 = new Date();

                // Service name gets a "free" suffix so it does not collide with the standard-SKU service below.
                SearchService searchServiceFree = azure.searchServices().define(searchServiceName + "free")
                    .withRegion(region)
                    .withNewResourceGroup(rgName)
                    .withFreeSku()
                    .create();

                t2 = new Date();
                System.out.println("Created Azure Search service: (took " + ((t2.getTime() - t1.getTime()) / 1000) + " seconds) " + searchServiceFree.id());
                Utils.print(searchServiceFree);
            }

            //=============================================================
            // Create an Azure Search service resource (standard SKU, one replica, one partition)

            System.out.println("Creating an Azure Search service");

            t1 = new Date();

            SearchService searchService = azure.searchServices().define(searchServiceName)
                .withRegion(region)
                .withNewResourceGroup(rgName)
                .withStandardSku()
                .withPartitionCount(1)
                .withReplicaCount(1)
                .create();

            t2 = new Date();
            System.out.println("Created Azure Search service: (took " + ((t2.getTime() - t1.getTime()) / 1000) + " seconds) " + searchService.id());
            Utils.print(searchService);

            //=============================================================
            // Iterate through the Azure Search service resources

            System.out.println("List all the Azure Search services for a given resource group");

            for (SearchService service : azure.searchServices().listByResourceGroup(rgName)) {
                Utils.print(service);
            }

            //=============================================================
            // Add a query key for the Search service resource

            System.out.println("Add a query key to an Azure Search service");

            searchService.createQueryKey("testKey1");

            //=============================================================
            // Regenerate the admin keys for an Azure Search service resource

            System.out.println("Regenerate the admin keys for an Azure Search service");

            searchService.regenerateAdminKeys(AdminKeyKind.PRIMARY);
            searchService.regenerateAdminKeys(AdminKeyKind.SECONDARY);

            //=============================================================
            // Update the Search service to use two replicas and two partitions and update the tags

            System.out.println("Update an Azure Search service");

            searchService = searchService.update()
                .withTag("tag2", "value2")
                .withTag("tag3", "value3")
                .withoutTag("tag1")
                .withReplicaCount(2)
                .withPartitionCount(2)
                .apply();

            Utils.print(searchService);

            //=============================================================
            // Delete a query key for an Azure Search service resource

            System.out.println("Delete a query key for an Azure Search service");

            // Index 1 presumably selects the key created above ("testKey1") after the default key -- TODO confirm ordering.
            searchService.deleteQueryKey(searchService.listQueryKeys().get(1).key());

            Utils.print(searchService);

            //=============================================================
            // Delete the Search service resource

            System.out.println("Delete an Azure Search service resource");

            azure.searchServices().deleteByResourceGroup(rgName, searchServiceName);

            return true;
        } catch (Exception f) {
            System.out.println(f.getMessage());
            f.printStackTrace();
        } finally {
            try {
                // Best-effort cleanup: removing the resource group deletes everything created above.
                System.out.println("Deleting Resource Group: " + rgName);
                azure.resourceGroups().beginDeleteByName(rgName);
                System.out.println("Deleted Resource Group: " + rgName);
            } catch (NullPointerException npe) {
                System.out.println("Did not create any resources in Azure. No clean up is necessary");
            } catch (Exception g) {
                g.printStackTrace();
            }
        }
        return false;
    }

    /**
     * Main entry point.
     *
     * @param args the parameters
     */
    public static void main(String[] args) {
        try {
            //=============================================================
            // Authenticate

            final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));

            Azure azure = Azure.configure()
                .withLogLevel(LogLevel.BODY)
                .authenticate(credFile)
                .withDefaultSubscription();

            // Print selected subscription
            System.out.println("Selected subscription: " + azure.subscriptionId());

            runSample(azure);
        } catch (Exception e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.reservation; import java.util.HashMap; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import org.apache.hadoop.yarn.api.records.ReservationDefinition; import org.apache.hadoop.yarn.api.records.ReservationId; import org.apache.hadoop.yarn.api.records.ReservationRequest; import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.server.resourcemanager.reservation.exceptions.ContractValidationException; import org.apache.hadoop.yarn.server.resourcemanager.reservation.exceptions.PlanningException; import org.apache.hadoop.yarn.util.resource.Resources; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This Agent employs a simple greedy placement strategy, placing the various * stages of a {@link ReservationRequest} from the deadline moving backward * towards the arrival. This allows jobs with earlier deadline to be scheduled * greedily as well. 
Combined with an opportunistic anticipation of work if the * cluster is not fully utilized also seems to provide good latency for * best-effort jobs (i.e., jobs running without a reservation). * * This agent does not account for locality and only consider container * granularity for validation purposes (i.e., you can't exceed max-container * size). */ public class GreedyReservationAgent implements ReservationAgent { private static final Logger LOG = LoggerFactory .getLogger(GreedyReservationAgent.class); @Override public boolean createReservation(ReservationId reservationId, String user, Plan plan, ReservationDefinition contract) throws PlanningException { return computeAllocation(reservationId, user, plan, contract, null); } @Override public boolean updateReservation(ReservationId reservationId, String user, Plan plan, ReservationDefinition contract) throws PlanningException { return computeAllocation(reservationId, user, plan, contract, plan.getReservationById(reservationId)); } @Override public boolean deleteReservation(ReservationId reservationId, String user, Plan plan) throws PlanningException { return plan.deleteReservation(reservationId); } private boolean computeAllocation(ReservationId reservationId, String user, Plan plan, ReservationDefinition contract, ReservationAllocation oldReservation) throws PlanningException, ContractValidationException { LOG.info("placing the following ReservationRequest: " + contract); Resource totalCapacity = plan.getTotalCapacity(); // Here we can addd logic to adjust the ResourceDefinition to account for // system "imperfections" (e.g., scheduling delays for large containers). 
// Align with plan step conservatively (i.e., ceil arrival, and floor // deadline) long earliestStart = contract.getArrival(); long step = plan.getStep(); if (earliestStart % step != 0) { earliestStart = earliestStart + (step - (earliestStart % step)); } long deadline = contract.getDeadline() - contract.getDeadline() % plan.getStep(); // setup temporary variables to handle time-relations between stages and // intermediate answers long curDeadline = deadline; long oldDeadline = -1; Map<ReservationInterval, Resource> allocations = new HashMap<ReservationInterval, Resource>(); RLESparseResourceAllocation tempAssigned = new RLESparseResourceAllocation(plan.getResourceCalculator(), plan.getMinimumAllocation()); List<ReservationRequest> stages = contract.getReservationRequests() .getReservationResources(); ReservationRequestInterpreter type = contract.getReservationRequests() .getInterpreter(); boolean hasGang = false; // Iterate the stages in backward from deadline for (ListIterator<ReservationRequest> li = stages.listIterator(stages.size()); li.hasPrevious();) { ReservationRequest currentReservationStage = li.previous(); // validate the RR respect basic constraints validateInput(plan, currentReservationStage, totalCapacity); hasGang |= currentReservationStage.getConcurrency() > 1; // run allocation for a single stage Map<ReservationInterval, Resource> curAlloc = placeSingleStage(plan, tempAssigned, currentReservationStage, earliestStart, curDeadline, oldReservation, totalCapacity); if (curAlloc == null) { // if we did not find an allocation for the currentReservationStage // return null, unless the ReservationDefinition we are placing is of // type ANY if (type != ReservationRequestInterpreter.R_ANY) { throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"); } else { continue; } } else { // if we did find an allocation add it to the set of allocations allocations.putAll(curAlloc); // if this request is of type ANY we are 
done searching (greedy) // and can return the current allocation (break-out of the search) if (type == ReservationRequestInterpreter.R_ANY) { break; } // if the request is of ORDER or ORDER_NO_GAP we constraint the next // round of allocation to precede the current allocation, by setting // curDeadline if (type == ReservationRequestInterpreter.R_ORDER || type == ReservationRequestInterpreter.R_ORDER_NO_GAP) { curDeadline = findEarliestTime(curAlloc.keySet()); // for ORDER_NO_GAP verify that the allocation found so far has no // gap, return null otherwise (the greedy procedure failed to find a // no-gap // allocation) if (type == ReservationRequestInterpreter.R_ORDER_NO_GAP && oldDeadline > 0) { if (oldDeadline - findLatestTime(curAlloc.keySet()) > plan .getStep()) { throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"); } } // keep the variable oldDeadline pointing to the last deadline we // found oldDeadline = curDeadline; } } } // / If we got here is because we failed to find an allocation for the // ReservationDefinition give-up and report failure to the user if (allocations.isEmpty()) { throw new PlanningException("The GreedyAgent" + " couldn't find a valid allocation for your request"); } // create reservation with above allocations if not null/empty Resource ZERO_RES = Resource.newInstance(0, 0); long firstStartTime = findEarliestTime(allocations.keySet()); // add zero-padding from arrival up to the first non-null allocation // to guarantee that the reservation exists starting at arrival if (firstStartTime > earliestStart) { allocations.put(new ReservationInterval(earliestStart, firstStartTime), ZERO_RES); firstStartTime = earliestStart; // consider to add trailing zeros at the end for simmetry } // Actually add/update the reservation in the plan. // This is subject to validation as other agents might be placing // in parallel and there might be sharing policies the agent is not // aware off. 
ReservationAllocation capReservation = new InMemoryReservationAllocation(reservationId, contract, user, plan.getQueueName(), firstStartTime, findLatestTime(allocations.keySet()), allocations, plan.getResourceCalculator(), plan.getMinimumAllocation(), hasGang); if (oldReservation != null) { return plan.updateReservation(capReservation); } else { return plan.addReservation(capReservation); } } private void validateInput(Plan plan, ReservationRequest rr, Resource totalCapacity) throws ContractValidationException { if (rr.getConcurrency() < 1) { throw new ContractValidationException("Gang Size should be >= 1"); } if (rr.getNumContainers() <= 0) { throw new ContractValidationException("Num containers should be >= 0"); } // check that gangSize and numContainers are compatible if (rr.getNumContainers() % rr.getConcurrency() != 0) { throw new ContractValidationException( "Parallelism must be an exact multiple of gang size"); } // check that the largest container request does not exceed // the cluster-wide limit for container sizes if (Resources.greaterThan(plan.getResourceCalculator(), totalCapacity, rr.getCapability(), plan.getMaximumAllocation())) { throw new ContractValidationException("Individual" + " capability requests should not exceed cluster's maxAlloc"); } } /** * This method actually perform the placement of an atomic stage of the * reservation. The key idea is to traverse the plan backward for a * "lease-duration" worth of time, and compute what is the maximum multiple of * our concurrency (gang) parameter we can fit. We do this and move towards * previous instant in time until the time-window is exhausted or we placed * all the user request. 
   */
  private Map<ReservationInterval, Resource> placeSingleStage(
      Plan plan, RLESparseResourceAllocation tempAssigned,
      ReservationRequest rr, long earliestStart, long curDeadline,
      ReservationAllocation oldResAllocation, final Resource totalCapacity) {

    // accumulates the (interval -> resources) pairs for this single stage;
    // returned only if the whole request could be placed
    Map<ReservationInterval, Resource> allocationRequests =
        new HashMap<ReservationInterval, Resource>();

    // compute the gang as a resource and get the duration
    Resource gang = Resources.multiply(rr.getCapability(), rr.getConcurrency());
    long dur = rr.getDuration();
    long step = plan.getStep();

    // ceil the duration to the next multiple of the plan step
    if (dur % step != 0) {
      dur += (step - (dur % step));
    }

    // we know for sure that this division has no remainder (part of contract
    // with user, validated before in validateInput)
    int gangsToPlace = rr.getNumContainers() / rr.getConcurrency();

    int maxGang = 0;

    // loop trying to place until we are done, or we are considering
    // an invalid range of times
    while (gangsToPlace > 0 && curDeadline - dur >= earliestStart) {

      // as we run along we remember how many gangs we can fit, and what
      // was the most constraining moment in time (we will restart just
      // after that to place the next batch)
      maxGang = gangsToPlace;
      long minPoint = curDeadline;
      int curMaxGang = maxGang;

      // start placing at deadline (excluded due to [,) interval semantics) and
      // move backward
      for (long t = curDeadline - plan.getStep();
          t >= curDeadline - dur && maxGang > 0;
          t = t - plan.getStep()) {

        // As we run along we will logically remove the previous allocation for
        // this reservation
        // if one existed
        Resource oldResCap = Resource.newInstance(0, 0);
        if (oldResAllocation != null) {
          oldResCap = oldResAllocation.getResourcesAtTime(t);
        }

        // compute net available resources: total capacity, plus whatever the
        // old allocation was holding at t, minus committed and tentatively
        // assigned resources
        Resource netAvailableRes = Resources.clone(totalCapacity);
        Resources.addTo(netAvailableRes, oldResCap);
        Resources.subtractFrom(netAvailableRes,
            plan.getTotalCommittedResources(t));
        Resources.subtractFrom(netAvailableRes,
            tempAssigned.getCapacityAtTime(t));

        // compute maximum number of gangs we could fit
        curMaxGang =
            (int) Math.floor(Resources.divide(plan.getResourceCalculator(),
                totalCapacity, netAvailableRes, gang));

        // pick the minimum between available resources in this instant, and how
        // many gangs we have to place
        curMaxGang = Math.min(gangsToPlace, curMaxGang);

        // compare with previous max, and set it. also remember *where* we found
        // the minimum (useful for next attempts)
        if (curMaxGang <= maxGang) {
          maxGang = curMaxGang;
          minPoint = t;
        }
      }

      // if we were able to place any gang, record this, and decrement
      // gangsToPlace
      if (maxGang > 0) {
        gangsToPlace -= maxGang;

        ReservationInterval reservationInt =
            new ReservationInterval(curDeadline - dur, curDeadline);
        ReservationRequest reservationRequest =
            ReservationRequest.newInstance(rr.getCapability(),
                rr.getConcurrency() * maxGang, rr.getConcurrency(),
                rr.getDuration());
        // remember occupied space (plan is read-only till we find a plausible
        // allocation for the entire request). This is needed since we might be
        // placing other ReservationRequest within the same
        // ReservationDefinition,
        // and we must avoid double-counting the available resources
        final Resource reservationRes = ReservationSystemUtil.toResource(
            reservationRequest);
        tempAssigned.addInterval(reservationInt, reservationRes);
        allocationRequests.put(reservationInt, reservationRes);
      }

      // reset our new starting point (curDeadline) to the most constraining
      // point so far, we will look "left" of that to find more places where
      // to schedule gangs (for sure nothing on the "right" of this point can
      // fit a full gang).
      curDeadline = minPoint;
    }

    // if no gangs are left to place we succeed and return the allocation
    if (gangsToPlace == 0) {
      return allocationRequests;
    } else {
      // If we are here it is because we did not manage to satisfy this request.
      // So we need to remove unwanted side-effects from tempAssigned (needed
      // for ANY).
      for (Map.Entry<ReservationInterval, Resource> tempAllocation :
          allocationRequests.entrySet()) {
        tempAssigned.removeInterval(tempAllocation.getKey(),
            tempAllocation.getValue());
      }
      // and return null to signal failure in this allocation
      return null;
    }
  }

  // finds the leftmost point of this set of ReservationInterval
  private long findEarliestTime(Set<ReservationInterval> resInt) {
    long ret = Long.MAX_VALUE;
    for (ReservationInterval s : resInt) {
      if (s.getStartTime() < ret) {
        ret = s.getStartTime();
      }
    }
    return ret;
  }

  // finds the rightmost point of this set of ReservationIntervals
  private long findLatestTime(Set<ReservationInterval> resInt) {
    long ret = Long.MIN_VALUE;
    for (ReservationInterval s : resInt) {
      if (s.getEndTime() > ret) {
        ret = s.getEndTime();
      }
    }
    return ret;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.jdbc; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.lang.reflect.Field; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.collections.map.LRUMap; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.save.CSVSaveService; import org.apache.jmeter.testelement.AbstractTestElement; import org.apache.jmeter.testelement.TestStateListener; import org.apache.jmeter.threads.JMeterVariables; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; /** * A base class for all JDBC test elements handling the basics of a SQL request. 
* */ public abstract class AbstractJDBCTestElement extends AbstractTestElement implements TestStateListener{ private static final long serialVersionUID = 235L; private static final Logger log = LoggingManager.getLoggerForClass(); private static final String COMMA = ","; // $NON-NLS-1$ private static final char COMMA_CHAR = ','; private static final String UNDERSCORE = "_"; // $NON-NLS-1$ // String used to indicate a null value private static final String NULL_MARKER = JMeterUtils.getPropDefault("jdbcsampler.nullmarker","]NULL["); // $NON-NLS-1$ private static final int MAX_OPEN_PREPARED_STATEMENTS = JMeterUtils.getPropDefault("jdbcsampler.maxopenpreparedstatements", 100); private static final String INOUT = "INOUT"; // $NON-NLS-1$ private static final String OUT = "OUT"; // $NON-NLS-1$ // TODO - should the encoding be configurable? protected static final String ENCODING = "UTF-8"; // $NON-NLS-1$ // key: name (lowercase) from java.sql.Types; entry: corresponding int value private static final Map<String, Integer> mapJdbcNameToInt; // read-only after class init static { // based on e291. Getting the Name of a JDBC Type from javaalmanac.com // http://javaalmanac.com/egs/java.sql/JdbcInt2Str.html mapJdbcNameToInt = new HashMap<String, Integer>(); //Get all fields in java.sql.Types and store the corresponding int values Field[] fields = java.sql.Types.class.getFields(); for (int i=0; i<fields.length; i++) { try { String name = fields[i].getName(); Integer value = (Integer)fields[i].get(null); mapJdbcNameToInt.put(name.toLowerCase(java.util.Locale.ENGLISH),value); } catch (IllegalAccessException e) { throw new RuntimeException(e); // should not happen } } } // Query types (used to communicate with GUI) // N.B. 
These must not be changed, as they are used in the JMX files static final String SELECT = "Select Statement"; // $NON-NLS-1$ static final String UPDATE = "Update Statement"; // $NON-NLS-1$ static final String CALLABLE = "Callable Statement"; // $NON-NLS-1$ static final String PREPARED_SELECT = "Prepared Select Statement"; // $NON-NLS-1$ static final String PREPARED_UPDATE = "Prepared Update Statement"; // $NON-NLS-1$ static final String COMMIT = "Commit"; // $NON-NLS-1$ static final String ROLLBACK = "Rollback"; // $NON-NLS-1$ static final String AUTOCOMMIT_FALSE = "AutoCommit(false)"; // $NON-NLS-1$ static final String AUTOCOMMIT_TRUE = "AutoCommit(true)"; // $NON-NLS-1$ private String query = ""; // $NON-NLS-1$ private String dataSource = ""; // $NON-NLS-1$ private String queryType = SELECT; private String queryArguments = ""; // $NON-NLS-1$ private String queryArgumentsTypes = ""; // $NON-NLS-1$ private String variableNames = ""; // $NON-NLS-1$ private String resultVariable = ""; // $NON-NLS-1$ private String queryTimeout = ""; // $NON-NLS-1$ /** * Cache of PreparedStatements stored in a per-connection basis. Each entry of this * cache is another Map mapping the statement string to the actual PreparedStatement. * At one time a Connection is only held by one thread */ private static final Map<Connection, Map<String, PreparedStatement>> perConnCache = new ConcurrentHashMap<Connection, Map<String, PreparedStatement>>(); /** * Creates a JDBCSampler. */ protected AbstractJDBCTestElement() { } /** * Execute the test element. 
* * @param conn a {@link SampleResult} in case the test should sample; <code>null</code> if only execution is requested * @throws UnsupportedOperationException if the user provided incorrect query type */ protected byte[] execute(Connection conn) throws SQLException, UnsupportedEncodingException, IOException, UnsupportedOperationException { log.debug("executing jdbc"); Statement stmt = null; try { // Based on query return value, get results String _queryType = getQueryType(); if (SELECT.equals(_queryType)) { stmt = conn.createStatement(); stmt.setQueryTimeout(getIntegerQueryTimeout()); ResultSet rs = null; try { rs = stmt.executeQuery(getQuery()); return getStringFromResultSet(rs).getBytes(ENCODING); } finally { close(rs); } } else if (CALLABLE.equals(_queryType)) { CallableStatement cstmt = getCallableStatement(conn); int out[]=setArguments(cstmt); // A CallableStatement can return more than 1 ResultSets // plus a number of update counts. boolean hasResultSet = cstmt.execute(); String sb = resultSetsToString(cstmt,hasResultSet, out); return sb.getBytes(ENCODING); } else if (UPDATE.equals(_queryType)) { stmt = conn.createStatement(); stmt.setQueryTimeout(getIntegerQueryTimeout()); stmt.executeUpdate(getQuery()); int updateCount = stmt.getUpdateCount(); String results = updateCount + " updates"; return results.getBytes(ENCODING); } else if (PREPARED_SELECT.equals(_queryType)) { PreparedStatement pstmt = getPreparedStatement(conn); setArguments(pstmt); ResultSet rs = null; try { rs = pstmt.executeQuery(); return getStringFromResultSet(rs).getBytes(ENCODING); } finally { close(rs); } } else if (PREPARED_UPDATE.equals(_queryType)) { PreparedStatement pstmt = getPreparedStatement(conn); setArguments(pstmt); pstmt.executeUpdate(); String sb = resultSetsToString(pstmt,false,null); return sb.getBytes(ENCODING); } else if (ROLLBACK.equals(_queryType)){ conn.rollback(); return ROLLBACK.getBytes(ENCODING); } else if (COMMIT.equals(_queryType)){ conn.commit(); return 
COMMIT.getBytes(ENCODING); } else if (AUTOCOMMIT_FALSE.equals(_queryType)){ conn.setAutoCommit(false); return AUTOCOMMIT_FALSE.getBytes(ENCODING); } else if (AUTOCOMMIT_TRUE.equals(_queryType)){ conn.setAutoCommit(true); return AUTOCOMMIT_TRUE.getBytes(ENCODING); } else { // User provided incorrect query type throw new UnsupportedOperationException("Unexpected query type: "+_queryType); } } finally { close(stmt); } } private String resultSetsToString(PreparedStatement pstmt, boolean result, int[] out) throws SQLException, UnsupportedEncodingException { StringBuilder sb = new StringBuilder(); int updateCount = 0; if (!result) { updateCount = pstmt.getUpdateCount(); } do { if (result) { ResultSet rs = null; try { rs = pstmt.getResultSet(); sb.append(getStringFromResultSet(rs)).append("\n"); // $NON-NLS-1$ } finally { close(rs); } } else { sb.append(updateCount).append(" updates.\n"); } result = pstmt.getMoreResults(); if (!result) { updateCount = pstmt.getUpdateCount(); } } while (result || (updateCount != -1)); if (out!=null && pstmt instanceof CallableStatement){ ArrayList<Object> outputValues = new ArrayList<Object>(); CallableStatement cs = (CallableStatement) pstmt; sb.append("Output variables by position:\n"); for(int i=0; i < out.length; i++){ if (out[i]!=java.sql.Types.NULL){ Object o = cs.getObject(i+1); outputValues.add(o); sb.append("["); sb.append(i+1); sb.append("] "); sb.append(o); sb.append("\n"); } } String varnames[] = getVariableNames().split(COMMA); if(varnames.length > 0) { JMeterVariables jmvars = getThreadContext().getVariables(); for(int i = 0; i < varnames.length && i < outputValues.size(); i++) { String name = varnames[i].trim(); if (name.length()>0){ // Save the value in the variable if present Object o = outputValues.get(i); jmvars.put(name, o == null ? 
null : o.toString()); } } } } return sb.toString(); } private int[] setArguments(PreparedStatement pstmt) throws SQLException, IOException { if (getQueryArguments().trim().length()==0) { return new int[]{}; } String[] arguments = CSVSaveService.csvSplitString(getQueryArguments(), COMMA_CHAR); String[] argumentsTypes = getQueryArgumentsTypes().split(COMMA); if (arguments.length != argumentsTypes.length) { throw new SQLException("number of arguments ("+arguments.length+") and number of types ("+argumentsTypes.length+") are not equal"); } int[] outputs= new int[arguments.length]; for (int i = 0; i < arguments.length; i++) { String argument = arguments[i]; String argumentType = argumentsTypes[i]; String[] arg = argumentType.split(" "); String inputOutput=""; if (arg.length > 1) { argumentType = arg[1]; inputOutput=arg[0]; } int targetSqlType = getJdbcType(argumentType); try { if (!OUT.equalsIgnoreCase(inputOutput)){ if (argument.equals(NULL_MARKER)){ pstmt.setNull(i+1, targetSqlType); } else { pstmt.setObject(i+1, argument, targetSqlType); } } if (OUT.equalsIgnoreCase(inputOutput)||INOUT.equalsIgnoreCase(inputOutput)) { CallableStatement cs = (CallableStatement) pstmt; cs.registerOutParameter(i+1, targetSqlType); outputs[i]=targetSqlType; } else { outputs[i]=java.sql.Types.NULL; // can't have an output parameter type null } } catch (NullPointerException e) { // thrown by Derby JDBC (at least) if there are no "?" 
markers in statement throw new SQLException("Could not set argument no: "+(i+1)+" - missing parameter marker?"); } } return outputs; } private static int getJdbcType(String jdbcType) throws SQLException { Integer entry = mapJdbcNameToInt.get(jdbcType.toLowerCase(java.util.Locale.ENGLISH)); if (entry == null) { try { entry = Integer.decode(jdbcType); } catch (NumberFormatException e) { throw new SQLException("Invalid data type: "+jdbcType); } } return (entry).intValue(); } private CallableStatement getCallableStatement(Connection conn) throws SQLException { return (CallableStatement) getPreparedStatement(conn,true); } private PreparedStatement getPreparedStatement(Connection conn) throws SQLException { return getPreparedStatement(conn,false); } private PreparedStatement getPreparedStatement(Connection conn, boolean callable) throws SQLException { Map<String, PreparedStatement> preparedStatementMap = perConnCache.get(conn); if (null == preparedStatementMap ) { @SuppressWarnings("unchecked") // LRUMap is not generic Map<String, PreparedStatement> lruMap = new LRUMap(MAX_OPEN_PREPARED_STATEMENTS) { private static final long serialVersionUID = 1L; @Override protected boolean removeLRU(LinkEntry entry) { PreparedStatement preparedStatement = (PreparedStatement)entry.getValue(); close(preparedStatement); return true; } }; preparedStatementMap = Collections.<String, PreparedStatement>synchronizedMap(lruMap); // As a connection is held by only one thread, we cannot already have a // preparedStatementMap put by another thread perConnCache.put(conn, preparedStatementMap); } PreparedStatement pstmt = preparedStatementMap.get(getQuery()); if (null == pstmt) { if (callable) { pstmt = conn.prepareCall(getQuery()); } else { pstmt = conn.prepareStatement(getQuery()); } pstmt.setQueryTimeout(getIntegerQueryTimeout()); // PreparedStatementMap is associated to one connection so // 2 threads cannot use the same PreparedStatement map at the same time preparedStatementMap.put(getQuery(), 
pstmt); } else { int timeoutInS = getIntegerQueryTimeout(); if(pstmt.getQueryTimeout() != timeoutInS) { pstmt.setQueryTimeout(getIntegerQueryTimeout()); } } pstmt.clearParameters(); return pstmt; } private static void closeAllStatements(Collection<PreparedStatement> collection) { for (PreparedStatement pstmt : collection) { close(pstmt); } } /** * Gets a Data object from a ResultSet. * * @param rs * ResultSet passed in from a database query * @return a Data object * @throws java.sql.SQLException * @throws UnsupportedEncodingException */ private String getStringFromResultSet(ResultSet rs) throws SQLException, UnsupportedEncodingException { ResultSetMetaData meta = rs.getMetaData(); StringBuilder sb = new StringBuilder(); int numColumns = meta.getColumnCount(); for (int i = 1; i <= numColumns; i++) { sb.append(meta.getColumnName(i)); if (i==numColumns){ sb.append('\n'); } else { sb.append('\t'); } } JMeterVariables jmvars = getThreadContext().getVariables(); String varnames[] = getVariableNames().split(COMMA); String resultVariable = getResultVariable().trim(); List<Map<String, Object> > results = null; if(resultVariable.length() > 0) { results = new ArrayList<Map<String,Object> >(); jmvars.putObject(resultVariable, results); } int j = 0; while (rs.next()) { Map<String, Object> row = null; j++; for (int i = 1; i <= numColumns; i++) { Object o = rs.getObject(i); if(results != null) { if(row == null) { row = new HashMap<String, Object>(numColumns); results.add(row); } row.put(meta.getColumnName(i), o); } if (o instanceof byte[]) { o = new String((byte[]) o, ENCODING); } sb.append(o); if (i==numColumns){ sb.append('\n'); } else { sb.append('\t'); } if (i <= varnames.length) { // i starts at 1 String name = varnames[i - 1].trim(); if (name.length()>0){ // Save the value in the variable if present jmvars.put(name+UNDERSCORE+j, o == null ? 
null : o.toString()); } } } } // Remove any additional values from previous sample for(int i=0; i < varnames.length; i++){ String name = varnames[i].trim(); if (name.length()>0 && jmvars != null){ final String varCount = name+"_#"; // $NON-NLS-1$ // Get the previous count String prevCount = jmvars.get(varCount); if (prevCount != null){ int prev = Integer.parseInt(prevCount); for (int n=j+1; n <= prev; n++ ){ jmvars.remove(name+UNDERSCORE+n); } } jmvars.put(varCount, Integer.toString(j)); // save the current count } } return sb.toString(); } public static void close(Connection c) { try { if (c != null) { c.close(); } } catch (SQLException e) { log.warn("Error closing Connection", e); } } public static void close(Statement s) { try { if (s != null) { s.close(); } } catch (SQLException e) { log.warn("Error closing Statement " + s.toString(), e); } } public static void close(ResultSet rs) { try { if (rs != null) { rs.close(); } } catch (SQLException e) { log.warn("Error closing ResultSet", e); } } /** * @return the integer representation queryTimeout */ public int getIntegerQueryTimeout() { int timeout = 0; try { timeout = Integer.parseInt(queryTimeout); } catch (NumberFormatException nfe) { timeout = 0; } return timeout; } /** * @return the queryTimeout */ public String getQueryTimeout() { return queryTimeout ; } /** * @param queryTimeout query timeout in seconds */ public void setQueryTimeout(String queryTimeout) { this.queryTimeout = queryTimeout; } public String getQuery() { return query; } @Override public String toString() { StringBuilder sb = new StringBuilder(80); sb.append("["); // $NON-NLS-1$ sb.append(getQueryType()); sb.append("] "); // $NON-NLS-1$ sb.append(getQuery()); sb.append("\n"); sb.append(getQueryArguments()); sb.append("\n"); sb.append(getQueryArgumentsTypes()); return sb.toString(); } /** * @param query * The query to set. */ public void setQuery(String query) { this.query = query; } /** * @return Returns the dataSource. 
     */
    public String getDataSource() {
        return dataSource;
    }

    /**
     * @param dataSource
     *            The dataSource to set.
     */
    public void setDataSource(String dataSource) {
        this.dataSource = dataSource;
    }

    /**
     * @return Returns the queryType.
     */
    public String getQueryType() {
        return queryType;
    }

    /**
     * @param queryType The queryType to set.
     */
    public void setQueryType(String queryType) {
        this.queryType = queryType;
    }

    /**
     * @return the comma-separated list of SQL query arguments
     */
    public String getQueryArguments() {
        return queryArguments;
    }

    /**
     * @param queryArguments the comma-separated list of SQL query arguments to set
     */
    public void setQueryArguments(String queryArguments) {
        this.queryArguments = queryArguments;
    }

    /**
     * @return the comma-separated list of JDBC types for the query arguments
     */
    public String getQueryArgumentsTypes() {
        return queryArgumentsTypes;
    }

    /**
     * @param queryArgumentsType the comma-separated list of JDBC types to set
     */
    public void setQueryArgumentsTypes(String queryArgumentsType) {
        this.queryArgumentsTypes = queryArgumentsType;
    }

    /**
     * @return the variableNames
     */
    public String getVariableNames() {
        return variableNames;
    }

    /**
     * @param variableNames the variableNames to set
     */
    public void setVariableNames(String variableNames) {
        this.variableNames = variableNames;
    }

    /**
     * @return the resultVariable
     */
    public String getResultVariable() {
        return resultVariable;
    }

    /**
     * @param resultVariable the variable name in which results will be stored
     */
    public void setResultVariable(String resultVariable) {
        this.resultVariable = resultVariable;
    }

    /**
     * {@inheritDoc}
     * @see org.apache.jmeter.testelement.TestStateListener#testStarted()
     */
    @Override
    public void testStarted() {
        testStarted("");
    }

    /**
     * {@inheritDoc}
     * @see org.apache.jmeter.testelement.TestStateListener#testStarted(java.lang.String)
     */
    @Override
    public void testStarted(String host) {
        // host is unused; the cache is process-wide
        cleanCache();
    }

    /**
     * {@inheritDoc}
     * @see org.apache.jmeter.testelement.TestStateListener#testEnded()
     */
    @Override
    public void testEnded() {
        testEnded("");
    }

    /**
     * {@inheritDoc}
     * @see org.apache.jmeter.testelement.TestStateListener#testEnded(java.lang.String)
     */
    @Override
    public void testEnded(String host) {
        // host is unused; the cache is process-wide
        cleanCache();
    }

    /**
     * Clean cache of PreparedStatements: closes every cached statement of
     * every connection, then clears the per-connection cache itself.
     */
    private static final void cleanCache() {
        for (Map<String, PreparedStatement> element : perConnCache.values()) {
            closeAllStatements(element.values());
        }
        perConnCache.clear();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rel.type; import org.apache.calcite.sql.SqlCollation; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.BasicSqlType; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import java.io.Serializable; import java.nio.charset.Charset; import java.util.List; /** * RelDataTypeImpl is an abstract base for implementations of * {@link RelDataType}. * * <p>Identity is based upon the {@link #digest} field, which each derived class * should set during construction.</p> */ public abstract class RelDataTypeImpl implements RelDataType, RelDataTypeFamily { //~ Instance fields -------------------------------------------------------- protected final List<RelDataTypeField> fieldList; protected String digest; //~ Constructors ----------------------------------------------------------- /** * Creates a RelDataTypeImpl. 
* * @param fieldList List of fields */ protected RelDataTypeImpl(List<? extends RelDataTypeField> fieldList) { if (fieldList != null) { // Create a defensive copy of the list. this.fieldList = ImmutableList.copyOf(fieldList); } else { this.fieldList = null; } } /** * Default constructor, to allow derived classes such as * {@link BasicSqlType} to be {@link Serializable}. * * <p>(The serialization specification says that a class can be serializable * even if its base class is not serializable, provided that the base class * has a public or protected zero-args constructor.) */ protected RelDataTypeImpl() { this(null); } //~ Methods ---------------------------------------------------------------- public RelDataTypeField getField(String fieldName, boolean caseSensitive, boolean elideRecord) { for (RelDataTypeField field : fieldList) { if (Util.matches(caseSensitive, field.getName(), fieldName)) { return field; } } if (elideRecord) { final List<Slot> slots = Lists.newArrayList(); getFieldRecurse(slots, this, 0, fieldName, caseSensitive); loop: for (Slot slot : slots) { switch (slot.count) { case 0: break; // no match at this depth; try deeper case 1: return slot.field; default: break loop; // duplicate fields at this depth; abandon search } } } // Extra field if (fieldList.size() > 0) { final RelDataTypeField lastField = Iterables.getLast(fieldList); if (lastField.getName().equals("_extra")) { return new RelDataTypeFieldImpl( fieldName, -1, lastField.getType()); } } return null; } private static void getFieldRecurse(List<Slot> slots, RelDataTypeImpl type, int depth, String fieldName, boolean caseSensitive) { while (slots.size() <= depth) { slots.add(new Slot()); } final Slot slot = slots.get(depth); for (RelDataTypeField field : type.fieldList) { if (Util.matches(caseSensitive, field.getName(), fieldName)) { slot.count++; slot.field = field; } } // No point looking to depth + 1 if there is a hit at depth. 
if (slot.count == 0) { for (RelDataTypeField field : type.fieldList) { if (field.getType().isStruct()) { getFieldRecurse(slots, (RelDataTypeImpl) field.getType(), depth + 1, fieldName, caseSensitive); } } } } // implement RelDataType public List<RelDataTypeField> getFieldList() { assert isStruct(); return fieldList; } public List<String> getFieldNames() { return Pair.left(fieldList); } // implement RelDataType public int getFieldCount() { assert isStruct() : this; return fieldList.size(); } // implement RelDataType public RelDataType getComponentType() { // this is not a collection type return null; } public RelDataType getKeyType() { // this is not a map type return null; } public RelDataType getValueType() { // this is not a map type return null; } // implement RelDataType public boolean isStruct() { return fieldList != null; } // implement RelDataType public boolean equals(Object obj) { if (obj instanceof RelDataTypeImpl) { final RelDataTypeImpl that = (RelDataTypeImpl) obj; return this.digest.equals(that.digest); } return false; } // implement RelDataType public int hashCode() { return digest.hashCode(); } // implement RelDataType public String getFullTypeString() { return digest; } // implement RelDataType public boolean isNullable() { return false; } // implement RelDataType public Charset getCharset() { return null; } // implement RelDataType public SqlCollation getCollation() { return null; } // implement RelDataType public SqlIntervalQualifier getIntervalQualifier() { return null; } // implement RelDataType public int getPrecision() { return PRECISION_NOT_SPECIFIED; } // implement RelDataType public int getScale() { return SCALE_NOT_SPECIFIED; } // implement RelDataType public SqlTypeName getSqlTypeName() { return null; } // implement RelDataType public SqlIdentifier getSqlIdentifier() { SqlTypeName typeName = getSqlTypeName(); if (typeName == null) { return null; } return new SqlIdentifier( typeName.name(), SqlParserPos.ZERO); } // implement RelDataType 
public RelDataTypeFamily getFamily() { // by default, put each type into its own family return this; } /** * Generates a string representation of this type. * * @param sb StringBuffer into which to generate the string * @param withDetail when true, all detail information needed to compute a * unique digest (and return from getFullTypeString) should * be included; */ protected abstract void generateTypeString( StringBuilder sb, boolean withDetail); /** * Computes the digest field. This should be called in every non-abstract * subclass constructor once the type is fully defined. */ protected void computeDigest() { StringBuilder sb = new StringBuilder(); generateTypeString(sb, true); if (!isNullable()) { sb.append(" NOT NULL"); } digest = sb.toString(); } // implement RelDataType public String toString() { StringBuilder sb = new StringBuilder(); generateTypeString(sb, false); return sb.toString(); } // implement RelDataType public RelDataTypePrecedenceList getPrecedenceList() { // by default, make each type have a precedence list containing // only other types in the same family return new RelDataTypePrecedenceList() { public boolean containsType(RelDataType type) { return getFamily() == type.getFamily(); } public int compareTypePrecedence( RelDataType type1, RelDataType type2) { assert containsType(type1); assert containsType(type2); return 0; } }; } // implement RelDataType public RelDataTypeComparability getComparability() { return RelDataTypeComparability.ALL; } /** * Returns an implementation of * {@link RelProtoDataType} * that copies a given type using the given type factory. */ public static RelProtoDataType proto(final RelDataType protoType) { assert protoType != null; return new RelProtoDataType() { public RelDataType apply(RelDataTypeFactory typeFactory) { return typeFactory.copyType(protoType); } }; } /** Returns a {@link org.apache.calcite.rel.type.RelProtoDataType} * that will create a type {@code typeName}. 
* * <p>For example, {@code proto(SqlTypeName.DATE), false} * will create {@code DATE NOT NULL}.</p> * * @param typeName Type name * @param nullable Whether nullable * @return Proto data type */ public static RelProtoDataType proto(final SqlTypeName typeName, final boolean nullable) { assert typeName != null; return new RelProtoDataType() { public RelDataType apply(RelDataTypeFactory typeFactory) { final RelDataType type = typeFactory.createSqlType(typeName); return typeFactory.createTypeWithNullability(type, nullable); } }; } /** Returns a {@link org.apache.calcite.rel.type.RelProtoDataType} * that will create a type {@code typeName(precision)}. * * <p>For example, {@code proto(SqlTypeName.VARCHAR, 100, false)} * will create {@code VARCHAR(100) NOT NULL}.</p> * * @param typeName Type name * @param precision Precision * @param nullable Whether nullable * @return Proto data type */ public static RelProtoDataType proto(final SqlTypeName typeName, final int precision, final boolean nullable) { assert typeName != null; return new RelProtoDataType() { public RelDataType apply(RelDataTypeFactory typeFactory) { final RelDataType type = typeFactory.createSqlType(typeName, precision); return typeFactory.createTypeWithNullability(type, nullable); } }; } /** Returns a {@link org.apache.calcite.rel.type.RelProtoDataType} * that will create a type {@code typeName(precision, scale)}. 
* * <p>For example, {@code proto(SqlTypeName.DECIMAL, 7, 2, false)} * will create {@code DECIMAL(7, 2) NOT NULL}.</p> * * @param typeName Type name * @param precision Precision * @param scale Scale * @param nullable Whether nullable * @return Proto data type */ public static RelProtoDataType proto(final SqlTypeName typeName, final int precision, final int scale, final boolean nullable) { return new RelProtoDataType() { public RelDataType apply(RelDataTypeFactory typeFactory) { final RelDataType type = typeFactory.createSqlType(typeName, precision, scale); return typeFactory.createTypeWithNullability(type, nullable); } }; } /** * Returns the "extra" field in a row type whose presence signals that * fields will come into existence just by asking for them. * * @param rowType Row type * @return The "extra" field, or null */ public static RelDataTypeField extra(RelDataType rowType) { // Even in a case-insensitive connection, the name must be precisely // "_extra". return rowType.getField("_extra", true, false); } /** Work space for {@link RelDataTypeImpl#getFieldRecurse}. */ private static class Slot { int count; RelDataTypeField field; } } // End RelDataTypeImpl.java
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2020 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans;

import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterfaceFactory;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LoggingObject;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.junit.rules.RestorePDIEngineEnvironment;
import org.pentaho.di.trans.step.StepStatus;
import org.pentaho.di.trans.steps.transexecutor.TransExecutorParameters;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.pentaho.di.core.Const.CR;
import static org.mockito.Mockito.verify;

/**
 * Unit tests for {@link SubtransExecutor}: execution of a sub-transformation,
 * status reporting, stopping, and the buffer-permit back-pressure mechanism.
 */
@RunWith( MockitoJUnitRunner.class )
public class SubtransExecutorTest {
  @Mock private LogChannelInterfaceFactory logChannelFactory;
  @Mock private LogChannel logChannel;

  @ClassRule public static RestorePDIEngineEnvironment env = new RestorePDIEngineEnvironment();

  /**
   * Routes all log channel creation through the mocked factory so that log
   * output produced by the executed transformation can be verified.
   */
  @Before
  public void setUp() throws Exception {
    KettleLogStore.setLogChannelInterfaceFactory( this.logChannelFactory );
    doReturn( LogLevel.BASIC ).when( logChannel ).getLogLevel();
    Mockito.when( this.logChannelFactory.create( any(), any() ) ).thenReturn( this.logChannel );
  }

  /** One-time Kettle environment bootstrap shared by all tests. */
  @BeforeClass
  public static void init() throws Exception {
    KettleClientEnvironment.init();
    PluginRegistry.addPluginType( StepPluginType.getInstance() );
    PluginRegistry.init();
    if ( !Props.isInitialized() ) {
      Props.init( 0 );
    }
  }

  @Test
  public void testRunningZeroRowsIsEmptyOptional() throws Exception {
    SubtransExecutor subtransExecutor = new SubtransExecutor( "subtransname", null, null, false, null, "", 0 );
    Optional<Result> execute = subtransExecutor.execute( Collections.emptyList() );
    assertFalse( execute.isPresent() );
  }

  /**
   * Runs a real sub-transformation (the .ktr fixtures on the classpath) and
   * checks the produced rows, the logged output, and the reported step statuses.
   */
  @Test
  public void testRunsATrans() throws Exception {
    TransMeta parentMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-parent.ktr" ).getPath(), new Variables() );
    TransMeta subMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-sub.ktr" ).getPath(), new Variables() );
    LoggingObjectInterface loggingObject = new LoggingObject( "anything" );
    Trans parentTrans = spy( new Trans( parentMeta, loggingObject ) );
    SubtransExecutor subtransExecutor =
      new SubtransExecutor( "subtransname", parentTrans, subMeta, true, new TransExecutorParameters(), "Group By",
        1001 );
    RowMetaInterface rowMeta = parentMeta.getStepFields( "Data Grid" );
    List<RowMetaAndData> rows = Arrays.asList(
      new RowMetaAndData( rowMeta, "Pentaho", 1L ),
      new RowMetaAndData( rowMeta, "Pentaho", 2L ),
      new RowMetaAndData( rowMeta, "Pentaho", 3L ),
      new RowMetaAndData( rowMeta, "Pentaho", 4L ) );
    Optional<Result> optionalResult = subtransExecutor.execute( rows );
    assertEquals( 1, optionalResult.orElseThrow( AssertionError::new ).getRows().size() );
    verify( this.logChannel )
      .logBasic(
        Const.CR
          + "------------> Linenr 1------------------------------" + Const.CR
          + "name = Pentaho" + Const.CR
          + "sum = 10" + Const.CR
          + Const.CR
          + "====================" );

    Map<String, StepStatus> statuses = subtransExecutor.getStatuses();
    assertEquals( 3, statuses.size() );
    List<StepStatus> statusList = new ArrayList<>( statuses.values() );
    assertEquals( "Get rows from result", statusList.get( 0 ).getStepname() );
    assertEquals( "Group by", statusList.get( 1 ).getStepname() );
    assertEquals( "Write to log", statusList.get( 2 ).getStepname() );

    // Replace each status with a spy so a second execution can be verified to
    // refresh the existing status objects rather than create new ones.
    for ( Map.Entry<String, StepStatus> entry : statuses.entrySet() ) {
      StepStatus statusSpy = spy( entry.getValue() );
      statuses.put( entry.getKey(), statusSpy );
    }
    subtransExecutor.execute( rows );
    for ( Map.Entry<String, StepStatus> entry : statuses.entrySet() ) {
      verify( entry.getValue() ).updateAll( any() );
    }

    verify( parentTrans, atLeastOnce() ).addActiveSubTransformation( eq( "subtransname" ), any( Trans.class ) );
  }

  @Test
  public void stopsAll() throws KettleException {
    TransMeta parentMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-parent.ktr" ).getPath(), new Variables() );
    TransMeta subMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-sub.ktr" ).getPath(), new Variables() );
    LoggingObjectInterface loggingObject = new LoggingObject( "anything" );
    Trans parentTrans = new Trans( parentMeta, loggingObject );
    SubtransExecutor subtransExecutor =
      new SubtransExecutor( "subtransname", parentTrans, subMeta, true, new TransExecutorParameters(), "", 1001 );
    subtransExecutor.running = Mockito.spy( subtransExecutor.running );
    RowMetaInterface rowMeta = parentMeta.getStepFields( "Data Grid" );
    List<RowMetaAndData> rows = Arrays.asList(
      new RowMetaAndData( rowMeta, "Pentaho", 1L ),
      new RowMetaAndData( rowMeta, "Pentaho", 2L ),
      new RowMetaAndData( rowMeta, "Pentaho", 3L ),
      new RowMetaAndData( rowMeta, "Pentaho", 4L ) );
    subtransExecutor.execute( rows );
    verify( subtransExecutor.running ).add( any() );
    subtransExecutor.stop();
    assertTrue( subtransExecutor.running.isEmpty() );
  }

  @Test
  public void doesNotExecuteWhenStopped() throws KettleException {
    TransMeta parentMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-parent.ktr" ).getPath(), new Variables() );
    TransMeta subMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-sub.ktr" ).getPath(), new Variables() );
    LoggingObjectInterface loggingObject = new LoggingObject( "anything" );
    Trans parentTrans = new Trans( parentMeta, loggingObject );
    SubtransExecutor subtransExecutor =
      new SubtransExecutor( "subtransname", parentTrans, subMeta, true, new TransExecutorParameters(), "", 1001 );
    RowMetaInterface rowMeta = parentMeta.getStepFields( "Data Grid" );
    List<RowMetaAndData> rows = Arrays.asList(
      new RowMetaAndData( rowMeta, "Pentaho", 1L ),
      new RowMetaAndData( rowMeta, "Pentaho", 2L ),
      new RowMetaAndData( rowMeta, "Pentaho", 3L ),
      new RowMetaAndData( rowMeta, "Pentaho", 4L ) );
    subtransExecutor.stop();
    subtransExecutor.execute( rows );

    verify( this.logChannel, never() )
      .logBasic(
        "\n"
          + "------------> Linenr 1------------------------------\n"
          + "name = Pentaho\n"
          + "sum = 10\n"
          + "\n"
          + "====================" );
  }

  /**
   * When we acquire the last permit, the thread should block until we timeout the test
   */
  @Test
  public void blockOnAcquiringLastPermit() throws InterruptedException, ExecutionException, TimeoutException {
    final ExecutorService executorService = Executors.newFixedThreadPool( 1 );
    SubtransExecutor subtransExecutor = new SubtransExecutor( "subtransname", null, null, false, null, "", 0 );
    CompletableFuture<Boolean> acquireThreadRunning = new CompletableFuture<>();
    // Prefetch count is set to 0, so blocking occurs when that single permit is acquired
    Future<?> future =
      executorService.submit( () -> safeAcquirePermits( subtransExecutor, 1, acquireThreadRunning ) );
    assertTrue( acquireThreadRunning.get( 5, TimeUnit.SECONDS ) );
    final boolean timedOut = !safeWaitForCompletion( future, 2 );
    assertTrue( timedOut );
  }

  /**
   * Acquires more permits than are initially available and verifies that two
   * execute() calls release enough permits for the acquiring thread to finish.
   */
  @Test
  public void blockAndUnblockTwice()
    throws KettleException, InterruptedException, ExecutionException, TimeoutException {
    final ExecutorService executorService = Executors.newFixedThreadPool( 1 );
    TransMeta parentMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-parent.ktr" ).getPath(), new Variables() );
    TransMeta subMeta =
      new TransMeta( this.getClass().getResource( "subtrans-executor-sub.ktr" ).getPath(), new Variables() );
    LoggingObjectInterface loggingObject = new LoggingObject( "anything" );
    Trans parentTrans = new Trans( parentMeta, loggingObject );
    SubtransExecutor subtransExecutor =
      new SubtransExecutor( "subtransname", parentTrans, subMeta, true, new TransExecutorParameters(), "", 1 );
    RowMetaInterface rowMeta = parentMeta.getStepFields( "Data Grid" );
    List<RowMetaAndData> rows = Arrays.asList(
      new RowMetaAndData( rowMeta, "Pentaho", 1L ),
      new RowMetaAndData( rowMeta, "Pentaho", 2L ),
      new RowMetaAndData( rowMeta, "Pentaho", 3L ),
      new RowMetaAndData( rowMeta, "Pentaho", 4L ) );
    CompletableFuture<Boolean> acquireThreadRunning = new CompletableFuture<>();
    Future<?> future =
      executorService.submit( () -> safeAcquirePermits( subtransExecutor, 8, acquireThreadRunning ) );
    // Acquire the permits before releasing them by waiting for the acquire thread to spin up.
    assertTrue( acquireThreadRunning.get( 5, TimeUnit.SECONDS ) );

    // Release 2*4=8 permits by calling execute twice with 4 rows
    subtransExecutor.execute( rows );
    subtransExecutor.execute( rows );

    final boolean timedOut = !safeWaitForCompletion( future, 5 );
    assertFalse( timedOut );
  }

  /**
   * Acquires {@code permitCount} buffer permits, signalling through
   * {@code threadRunning} once this worker thread is live; an interrupt fails the test.
   */
  private void safeAcquirePermits( SubtransExecutor subtransExecutor, int permitCount,
                                   CompletableFuture<Boolean> threadRunning ) {
    try {
      for ( int i = 0; i < permitCount; i++ ) {
        threadRunning.complete( true );
        subtransExecutor.acquireBufferPermit();
      }
    } catch ( InterruptedException e ) {
      fail();
    }
  }

  /**
   * @param future the task to wait on
   * @return false if timed out, otherwise true
   */
  private boolean safeWaitForCompletion( Future<?> future, long timeoutSeconds ) {
    try {
      future.get( timeoutSeconds, TimeUnit.SECONDS );
    } catch ( TimeoutException te ) {
      return false;
    } catch ( InterruptedException | ExecutionException e ) {
      fail();
    }
    return true;
  }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.amplify.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Result structure for the list Domain Association request.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/amplify-2017-07-25/ListDomainAssociations" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListDomainAssociationsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * List of Domain Associations.
     * </p>
     */
    private java.util.List<DomainAssociation> domainAssociations;
    /**
     * <p>
     * Pagination token. If non-null pagination token is returned in a result, then pass its value in another request to
     * fetch more entries.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * List of Domain Associations.
     * </p>
     *
     * @return List of Domain Associations.
     */
    public java.util.List<DomainAssociation> getDomainAssociations() {
        return domainAssociations;
    }

    /**
     * <p>
     * List of Domain Associations.
     * </p>
     *
     * @param domainAssociations
     *        List of Domain Associations. A defensive copy is stored; passing {@code null} clears the list.
     */
    public void setDomainAssociations(java.util.Collection<DomainAssociation> domainAssociations) {
        this.domainAssociations = (domainAssociations == null)
                ? null
                : new java.util.ArrayList<DomainAssociation>(domainAssociations);
    }

    /**
     * <p>
     * List of Domain Associations.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDomainAssociations(java.util.Collection)} or {@link #withDomainAssociations(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param domainAssociations
     *        List of Domain Associations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListDomainAssociationsResult withDomainAssociations(DomainAssociation... domainAssociations) {
        if (this.domainAssociations == null) {
            // Lazily create the backing list sized for the incoming values.
            this.domainAssociations = new java.util.ArrayList<DomainAssociation>(domainAssociations.length);
        }
        java.util.Collections.addAll(this.domainAssociations, domainAssociations);
        return this;
    }

    /**
     * <p>
     * List of Domain Associations.
     * </p>
     *
     * @param domainAssociations
     *        List of Domain Associations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListDomainAssociationsResult withDomainAssociations(java.util.Collection<DomainAssociation> domainAssociations) {
        setDomainAssociations(domainAssociations);
        return this;
    }

    /**
     * <p>
     * Pagination token. If non-null pagination token is returned in a result, then pass its value in another request to
     * fetch more entries.
     * </p>
     *
     * @param nextToken
     *        Pagination token. If non-null pagination token is returned in a result, then pass its value in another
     *        request to fetch more entries.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * Pagination token. If non-null pagination token is returned in a result, then pass its value in another request to
     * fetch more entries.
     * </p>
     *
     * @return Pagination token. If non-null pagination token is returned in a result, then pass its value in another
     *         request to fetch more entries.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * Pagination token. If non-null pagination token is returned in a result, then pass its value in another request to
     * fetch more entries.
     * </p>
     *
     * @param nextToken
     *        Pagination token. If non-null pagination token is returned in a result, then pass its value in another
     *        request to fetch more entries.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListDomainAssociationsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder text = new StringBuilder("{");
        if (getDomainAssociations() != null) {
            text.append("DomainAssociations: ").append(getDomainAssociations()).append(",");
        }
        if (getNextToken() != null) {
            text.append("NextToken: ").append(getNextToken());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so this also covers the null check.
        if (!(obj instanceof ListDomainAssociationsResult)) {
            return false;
        }
        ListDomainAssociationsResult that = (ListDomainAssociationsResult) obj;
        return java.util.Objects.equals(this.getDomainAssociations(), that.getDomainAssociations())
                && java.util.Objects.equals(this.getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the generated code.
        return java.util.Objects.hash(getDomainAssociations(), getNextToken());
    }

    @Override
    public ListDomainAssociationsResult clone() {
        try {
            return (ListDomainAssociationsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
package com.communote.server.core.security;

import java.util.List;

import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import com.communote.server.api.ServiceLocator;
import com.communote.server.core.security.iprange.CurrentIpNotInRange;
import com.communote.server.core.security.iprange.InvalidIpAddressException;
import com.communote.server.core.security.iprange.IpRangeException;
import com.communote.server.core.security.iprange.IpRangeFilterManagement;
import com.communote.server.core.security.iprange.IpRangeHelper;
import com.communote.server.model.security.ChannelType;
import com.communote.server.model.security.IpRange;
import com.communote.server.persistence.security.iprange.IpRangeFilterVO;
import com.communote.server.test.CommunoteIntegrationTest;
import com.communote.server.test.util.AuthenticationTestUtils;

/**
 * The Class IpRangeFilterManagementTest. Integration tests for creating, updating, enabling and
 * removing IP range filters, and for the in-range checks they drive.
 *
 * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
 */
public class IpRangeFilterManagementTest extends CommunoteIntegrationTest {

    /** The Constant LOG. */
    private static final Logger LOG = LoggerFactory.getLogger(IpRangeFilterManagementTest.class);

    /** The Constant CURRENT_IP_V4. */
    private static final String CURRENT_IP_V4 = "127.0.0.1";

    private static final String IP_FILTER_INCLUDING_CURRENT_IP = "IncludeExcludeFilter";

    private static final String IP_FILTER_EMPTY = "emptyfilter";

    private static final ChannelType WEB_CHANNEL = ChannelType.WEB;

    private IpRangeFilterManagement ipRangeFilterManagement;

    /**
     * Creates a new filter and tries to enable it.
     *
     * @param name
     *            Name of filter
     * @param includes
     *            Include IPs
     * @param excludes
     *            exclude IPs
     * @param currentIP
     *            current IP address
     * @param channel
     *            the channel the filter will apply to
     * @return the VO of the created filter
     * @throws Exception
     *             in case creation or activation fails
     */
    private IpRangeFilterVO createFilter(String name, String includes, String excludes,
            String currentIP, ChannelType channel) throws Exception {
        IpRangeFilterVO vo = ipRangeFilterManagement.createFilter(name, includes, excludes);
        ipRangeFilterManagement.setFilterChannelEnabled(vo.getId(), channel, true, currentIP,
                WEB_CHANNEL);
        // use the caller-supplied IP here too; previously CURRENT_IP_V4 was hard-coded even
        // though every call site passes it as currentIP
        ipRangeFilterManagement.setFilterEnabled(vo.getId(), true, currentIP, WEB_CHANNEL);
        return vo;
    }

    /**
     * Retrieves existing filters.
     *
     * @return a list with the existing IP range filters
     */
    private List<IpRangeFilterVO> getFilters() {
        return ipRangeFilterManagement.listFilter();
    }

    /**
     * Remove all filter
     *
     * @throws CurrentIpNotInRange
     *             in case the remove of a filter failed because of current IP being blocked
     * @throws InvalidIpAddressException
     *             in case the current IP is not a valid IP address
     */
    private void removeAllFilter() throws CurrentIpNotInRange, InvalidIpAddressException {
        List<IpRangeFilterVO> filters = ipRangeFilterManagement.listFilter();
        // remove filter that includes the current IP after all the other filters to avoid
        // CurrentIpNotInRange
        Long lastFilterToRemove = null;
        for (IpRangeFilterVO filter : filters) {
            if (filter.getName().equals(IP_FILTER_INCLUDING_CURRENT_IP)) {
                lastFilterToRemove = filter.getId();
            } else {
                removeFilter(filter.getId());
            }
        }
        if (lastFilterToRemove != null) {
            removeFilter(lastFilterToRemove);
        }
    }

    /**
     * Remove filter with the given id
     *
     * @param id
     *            Filter id
     * @throws CurrentIpNotInRange
     *             CurrentIpNotInRange
     * @throws InvalidIpAddressException
     *             in case the current IP is not a valid IP address
     */
    private void removeFilter(Long id) throws CurrentIpNotInRange, InvalidIpAddressException {
        ipRangeFilterManagement.removeFilter(id, CURRENT_IP_V4, WEB_CHANNEL);
    }

    /**
     * Test channel enabled.
     *
     * @throws Exception
     *             the exception
     */
    @BeforeClass(dependsOnGroups = "integration-test-setup")
    public void setupChannelEnabled() throws Exception {
        ipRangeFilterManagement = ServiceLocator.instance().getService(
                IpRangeFilterManagement.class);
        AuthenticationTestUtils.setManagerContext();
        for (String channel : ChannelType.names()) {
            ipRangeFilterManagement.setChannelEnabled(ChannelType.fromString(channel), true,
                    CURRENT_IP_V4, WEB_CHANNEL);
        }
    }

    /**
     * Test to ensure that you cannot enable a filter that would block your current IP.
     *
     * @throws Exception
     *             in case the test failed
     */
    @Test(dependsOnMethods = "testUpdateFilter", expectedExceptions = { CurrentIpNotInRange.class })
    public void testAvoidBlockingCurrentIp() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        removeAllFilter();
        createFilter("filter1", "192.1.123.44", "", CURRENT_IP_V4, ChannelType.WEB);
    }

    /**
     * Tests that one cannot remove the filter that grants access for the current IP.
     *
     * @throws Exception
     *             the expected exception or another if the test failed
     */
    @Test(expectedExceptions = { CurrentIpNotInRange.class }, dependsOnMethods = { "testFindIpRanges" })
    public void testAvoidBlockingCurrentIpWhenRemoving() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filters = getFilters();
        Assert.assertEquals(filters.size(), 4);
        for (IpRangeFilterVO filter : filters) {
            if (filter.getName().equals(IP_FILTER_INCLUDING_CURRENT_IP)) {
                removeFilter(filter.getId());
                break;
            }
        }
    }

    /**
     * Test create filter.
     *
     * @throws IpRangeException
     *             the ip range exception
     */
    @Test
    public void testCreateFilter() throws IpRangeException {
        AuthenticationTestUtils.setManagerContext();
        try {
            IpRangeFilterVO filter = ipRangeFilterManagement.createFilter(IP_FILTER_EMPTY, "", "");
            Assert.assertNotNull(filter, "got no filter");
            filter = ipRangeFilterManagement.createFilter(IP_FILTER_INCLUDING_CURRENT_IP,
                    "127.0.0.1", "3.4.5.6");
            LOG.debug("filter: \n"
                    + ToStringBuilder.reflectionToString(filter, ToStringStyle.MULTI_LINE_STYLE));
            filter = ipRangeFilterManagement.createFilter("IncludeFilter", "168.0.0.1", "");
            LOG.debug("filter: \n"
                    + ToStringBuilder.reflectionToString(filter, ToStringStyle.MULTI_LINE_STYLE));
            filter = ipRangeFilterManagement.createFilter("IncludeFilter_2",
                    "::168.0.0.1-::168.0.5.0", "");
            LOG.debug("filter: \n"
                    + ToStringBuilder.reflectionToString(filter, ToStringStyle.MULTI_LINE_STYLE));
        } catch (IpRangeException e) {
            LOG.error(e + ", ip: '" + e.getIp() + "'", e);
            throw e;
        }
    }

    /**
     * Test filter channel enabled.
     *
     * @throws Exception
     *             the exception
     */
    @Test(dependsOnMethods = "testCreateFilter")
    public void testFilterChannelEnabled() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filtersVO = getFilters();
        for (IpRangeFilterVO filterVO : filtersVO) {
            ipRangeFilterManagement.setFilterChannelEnabled(filterVO.getId(), ChannelType.API,
                    true, CURRENT_IP_V4, WEB_CHANNEL);
            ipRangeFilterManagement.setFilterChannelEnabled(filterVO.getId(), ChannelType.WEB,
                    true, CURRENT_IP_V4, WEB_CHANNEL);
        }
        for (IpRangeFilterVO filterVO : filtersVO) {
            filterVO = ipRangeFilterManagement.findFilterById(filterVO.getId());
            // assertEquals instead of assertSame: the int arguments are autoboxed, and
            // assertSame on boxed Integers only works by accident for small cached values
            Assert.assertEquals(filterVO.getChannels().length, 2, "Two channel must be setted");
        }
    }

    /**
     * Test filter enabled.
     *
     * @throws Exception
     *             the exception
     */
    @Test(dependsOnMethods = "testListFilter")
    public void testFilterEnabled() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filtersVO = getFilters();
        for (IpRangeFilterVO filterVO : filtersVO) {
            ipRangeFilterManagement.setFilterEnabled(filterVO.getId(), true, CURRENT_IP_V4,
                    WEB_CHANNEL);
            filterVO = ipRangeFilterManagement.findFilterById(filterVO.getId());
            Assert.assertTrue(filterVO.isEnabled(), "The filter must be enabled");
        }
    }

    /**
     * Test find all ip ranges
     *
     * @throws Exception
     *             the exception
     */
    @Test(dependsOnMethods = "testFilterEnabled")
    public void testFindIpRanges() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRange> includes = ipRangeFilterManagement.findIpRanges(ChannelType.WEB, true);
        List<IpRange> excludes = ipRangeFilterManagement.findIpRanges(ChannelType.WEB, false);
        Assert.assertTrue(includes.size() != 0, "The include list of ip ranges can not be null");
        Assert.assertTrue(excludes.size() != 0, "The exclude list of ip ranges can not be null");
    }

    /**
     * Test some ip addresses validation
     *
     */
    @Test
    public void testIpValidation() {
        String[] ips = new String[4];
        // valid IPv4 and IPv6 addresses
        ips[0] = "127.0.0.1";
        ips[1] = "fe80:0:0:0:b136:8d30:bcf5:c2bf";
        ips[2] = "0:0:0:0:0:0:0:1";
        ips[3] = "::c000:9b00";
        for (String ip : ips) {
            boolean isValid = IpRangeHelper.validateIP(ip);
            Assert.assertTrue(isValid, "The ip " + ip + " is not valid");
        }
        // malformed addresses must be rejected
        ips[0] = "127.:.0.1";
        ips[1] = "fe80:q:0:0:b136:8d30:bcf5:c2bf";
        ips[2] = "0:0:0:0:0:0:1";
        ips[3] = "e.e.e.e.222.tt";
        for (String ip : ips) {
            boolean isValid = IpRangeHelper.validateIP(ip);
            Assert.assertFalse(isValid, "The invalide ip " + ip + " ");
        }
    }

    /**
     * Test list filter.
     *
     * @throws Exception
     *             the exception
     */
    @Test(dependsOnMethods = "testFilterChannelEnabled")
    public void testListFilter() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filtersVO = ipRangeFilterManagement.listFilter();
        // assertEquals instead of assertSame: comparing autoboxed ints by identity is fragile
        Assert.assertEquals(filtersVO.size(), 4, "Four filters must be defined");
    }

    /**
     * Test whether an ip is in defined ranges
     *
     * @throws Exception
     *             in case the test failed
     */
    @Test(dependsOnMethods = { "testIpValidation", "testAvoidBlockingCurrentIp" })
    public void testRange() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        // no blocking if no filters exist
        Assert.assertTrue(ipRangeFilterManagement.isInRange("127.0.0.1", ChannelType.WEB));
        Assert.assertTrue(ipRangeFilterManagement.isInRange("192.168.144.155", ChannelType.WEB));
        // include range
        IpRangeFilterVO vo1 = createFilter("filter1", "127.0.0.1,192.168.1.1-192.168.255.255", "",
                CURRENT_IP_V4, ChannelType.WEB);
        Assert.assertTrue(ipRangeFilterManagement.isInRange("127.0.0.1", ChannelType.WEB));
        Assert.assertTrue(ipRangeFilterManagement.isInRange("192.168.144.155", ChannelType.WEB));
        Assert.assertFalse(ipRangeFilterManagement.isInRange("192.167.144.155", ChannelType.WEB));
        removeFilter(vo1.getId());
        // exclude range
        IpRangeFilterVO vo2 = createFilter("filter2", "",
                "0.0.0.0-127.0.0.0,127.0.0.2-255.255.255.255", CURRENT_IP_V4, ChannelType.WEB);
        Assert.assertTrue(ipRangeFilterManagement.isInRange("127.0.0.1", ChannelType.WEB));
        Assert.assertFalse(ipRangeFilterManagement.isInRange("192.167.144.155", ChannelType.WEB));
        removeFilter(vo2.getId());
        // single include
        IpRangeFilterVO vo3 = createFilter("filter3", CURRENT_IP_V4, "", CURRENT_IP_V4,
                ChannelType.WEB);
        Assert.assertTrue(ipRangeFilterManagement.isInRange(CURRENT_IP_V4, ChannelType.WEB));
        Assert.assertFalse(ipRangeFilterManagement.isInRange("192.167.144.155", ChannelType.WEB));
        removeFilter(vo3.getId());
    }

    /**
     * Test remove filter.
     *
     * @throws Exception
     *             in case the test failed
     */
    @Test(dependsOnMethods = "testAvoidBlockingCurrentIpWhenRemoving")
    public void testRemoveFilter() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filters = getFilters();
        Long id = null;
        for (IpRangeFilterVO filter : filters) {
            // do not remove filter granting access for current IP and also not the empty filter
            // which is needed later
            if (!filter.getName().equals(IP_FILTER_INCLUDING_CURRENT_IP)
                    && !filter.getName().equals(IP_FILTER_EMPTY)) {
                id = filter.getId();
                break;
            }
        }
        Assert.assertNotNull(id, "found no filter to remove.");
        ipRangeFilterManagement.removeFilter(id, CURRENT_IP_V4, WEB_CHANNEL);
        IpRangeFilterVO filterVO = ipRangeFilterManagement.findFilterById(id);
        Assert.assertNull(filterVO, "The filter for id " + id + " was not successfully removed");
    }

    /**
     * Tests updating a filter (the empty filter).
     *
     * @throws Exception
     *             in case the test fails
     */
    @Test(dependsOnMethods = "testRemoveFilter")
    public void testUpdateFilter() throws Exception {
        AuthenticationTestUtils.setManagerContext();
        List<IpRangeFilterVO> filters = getFilters();
        Long id = null;
        for (IpRangeFilterVO filter : filters) {
            if (filter.getName().equals(IP_FILTER_EMPTY)) {
                id = filter.getId();
            }
        }
        Assert.assertNotNull(id, "filter to update not found.");
        String newFilterName = "New Filtername";
        String includesPattern = "127.0.0.1-127.0.0.7";
        String excludesPattern = "127.0.0.8";
        ipRangeFilterManagement.updateFilter(id, newFilterName, includesPattern, excludesPattern,
                CURRENT_IP_V4, WEB_CHANNEL);
        filters = getFilters();
        Assert.assertEquals(filters.size(), 3, "number of filters changed.");
        for (IpRangeFilterVO filter : filters) {
            if (filter.getName().equals(IP_FILTER_EMPTY)) {
                Assert.fail("Filtername was not changed");
            } else if (filter.getName().equals(newFilterName)) {
                Assert.assertEquals(filter.getId(), id, "wrong filter was updated.");
                Assert.assertEquals(filter.getExcludes(), excludesPattern);
                Assert.assertEquals(filter.getIncludes(), includesPattern);
            }
        }
    }
}
/* Copyright 2007-2009 Selenium committers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.thoughtworks.selenium.webdriven; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.Maps; import com.thoughtworks.selenium.CommandProcessor; import com.thoughtworks.selenium.SeleniumException; import com.thoughtworks.selenium.webdriven.commands.*; import org.openqa.selenium.HasCapabilities; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebDriver; import org.openqa.selenium.internal.WrapsDriver; import java.util.Map; /** * A CommandProcessor which delegates commands down to an underlying webdriver instance. 
*/
public class WebDriverCommandProcessor implements CommandProcessor, WrapsDriver {

  // Registry mapping Selenese command names (as used by CommandProcessor
  // clients) to their WebDriver-backed implementations. Populated by
  // setUpMethodMap() once a driver is available.
  private final Map<String, SeleneseCommand<?>> seleneseMethods = Maps.newHashMap();
  private final String baseUrl;
  // Runs every command under a timeout (initially 30s; adjustable at runtime
  // via the "setTimeout" command).
  private final Timer timer;
  // Chain of mutators applied to user-supplied scripts (getEval, runScript,
  // waitForCondition, ...) before they are executed in the browser.
  private final CompoundMutator scriptMutator;
  private boolean enableAlertOverrides = true;
  // Supplies the driver lazily on start(). When a driver is handed to the
  // constructor directly, this holds an ExplodingSupplier instead.
  private Supplier<WebDriver> maker;
  private WebDriver driver;

  /**
   * Wraps an already-started driver. The command map is built immediately.
   * Note: because {@code maker} is set to an {@code ExplodingSupplier}, calling
   * {@link #start()} on an instance created this way throws (see
   * {@link #start(Object)}).
   *
   * @param baseUrl base URL that relative "open" targets are resolved against
   * @param driver a live driver; must implement {@code JavascriptExecutor}
   */
  public WebDriverCommandProcessor(String baseUrl, WebDriver driver) {
    this(baseUrl, new ExplodingSupplier());
    this.driver = driver;
    assertDriverSupportsJavascript(driver);
    setUpMethodMap();
  }

  /**
   * Defers driver creation to the supplier; the driver is obtained and the
   * command map built on the first call to {@link #start()}.
   */
  public WebDriverCommandProcessor(String baseUrl, Supplier<WebDriver> maker) {
    this.maker = maker;
    this.baseUrl = baseUrl;
    this.timer = new Timer(30000);
    this.scriptMutator = new CompoundMutator(baseUrl);
  }

  public WebDriver getWrappedDriver() {
    return driver;
  }

  // Not meaningful for the WebDriver-backed processor (no RC server involved).
  public String getRemoteControlServerLocation() {
    throw new UnsupportedOperationException();
  }

  /**
   * Executes the named command and stringifies its result; a null result is
   * returned as null rather than "null".
   */
  public String doCommand(String commandName, String[] args) {
    Object val = execute(commandName, args);
    if (val == null) {
      return null;
    }

    return val.toString();
  }

  public void setExtensionJs(String s) {
    throw new UnsupportedOperationException();
  }

  public void start() {
    start((Object) null);
  }

  public void start(String s) {
    throw new UnsupportedOperationException("Unsure how to process: " + s);
  }

  /**
   * Starts a session: obtains the driver from the supplier, verifies JS
   * support, and builds the command map. The argument is ignored.
   *
   * @throws SeleniumException if a session is already running
   */
  public void start(Object o) {
    if (driver != null) {
      if (maker != null) {
        throw new SeleniumException("You may not start more than one session at a time");
      } else {
        // The command processor was instantiated with an already started driver
        return;
      }
    }

    driver = maker.get();
    assertDriverSupportsJavascript(driver);
    setUpMethodMap();
  }

  /** Stops the command timer, quits the driver (if any), and clears it. */
  public void stop() {
    timer.stop();
    if (driver != null) {
      driver.quit();
    }
    driver = null;
  }

  public String getString(String commandName, String[] args) {
    return (String) execute(commandName, args);
  }

  public String[] getStringArray(String commandName, String[] args) {
    return (String[]) execute(commandName, args);
  }

  public Number getNumber(String commandName, String[] args) {
    return (Number) execute(commandName, args);
  }

  public Number[] getNumberArray(String s, String[] strings) {
    throw new UnsupportedOperationException();
  }

  public boolean getBoolean(String commandName, String[] args) {
    return (Boolean) execute(commandName, args);
  }

  public boolean[] getBooleanArray(String s, String[] strings) {
    throw new UnsupportedOperationException();
  }

  /**
   * Looks up the command by name and runs it against the current driver under
   * the shared timer.
   *
   * @throws UnsupportedOperationException if the command name is not registered
   */
  private Object execute(String commandName, final String[] args) {
    final SeleneseCommand<?> command = seleneseMethods.get(commandName);
    if (command == null) {
      throw new UnsupportedOperationException(commandName);
    }

    return timer.run(command, driver, args);
  }

  public void addMutator(ScriptMutator mutator) {
    scriptMutator.addMutator(mutator);
  }

  public boolean isMethodAvailable(String methodName) {
    return seleneseMethods.containsKey(methodName);
  }

  /** Registers (or overrides) a command implementation under the given name. */
  public void addMethod(String methodName, SeleneseCommand<?> command) {
    seleneseMethods.put(methodName, command);
  }

  public SeleneseCommand<?> getMethod(String methodName) {
    return seleneseMethods.get(methodName);
  }

  // Fails fast unless the driver can run JavaScript; a driver without
  // HasCapabilities (e.g. a proxy) is given the benefit of the doubt.
  @VisibleForTesting
  protected void assertDriverSupportsJavascript(WebDriver driver) {
    if (!(driver instanceof JavascriptExecutor)) {
      throw new IllegalStateException("Driver instance must support JS.");
    }

    if (!(driver instanceof HasCapabilities)) {
      // Might be proxy. Bail.
      return;
    }

    if (!((HasCapabilities) driver).getCapabilities().isJavascriptEnabled()) {
      throw new IllegalStateException("JS support must be enabled.");
    }
  }

  /**
   * Sets whether to enable emulation of Selenium's alert handling functions or
   * to preserve WebDriver's alert handling. This has no effect after calling
   * {@link #start()}.
   */
  public void setEnableAlertOverrides(boolean enableAlertOverrides) {
    this.enableAlertOverrides = enableAlertOverrides;
  }

  /**
   * Builds the name -&gt; command registry used by {@link #execute}. Called once
   * per session, after the driver exists (Windows needs it).
   */
  private void setUpMethodMap() {
    JavascriptLibrary javascriptLibrary = new JavascriptLibrary();
    ElementFinder elementFinder = new ElementFinder(javascriptLibrary);
    KeyState keyState = new KeyState();
    AlertOverride alertOverride = new AlertOverride(enableAlertOverrides);
    Windows windows = new Windows(driver);

    // Note that we use the names used by the CommandProcessor
    seleneseMethods.put("addLocationStrategy", new AddLocationStrategy(elementFinder));
    seleneseMethods.put("addSelection", new AddSelection(javascriptLibrary, elementFinder));
    seleneseMethods.put("allowNativeXpath", new AllowNativeXPath());
    seleneseMethods.put("altKeyDown", new AltKeyDown(keyState));
    seleneseMethods.put("altKeyUp", new AltKeyUp(keyState));
    seleneseMethods.put("assignId", new AssignId(javascriptLibrary, elementFinder));
    seleneseMethods.put("attachFile", new AttachFile(elementFinder));
    seleneseMethods.put("captureScreenshotToString", new CaptureScreenshotToString());
    seleneseMethods.put("click", new Click(alertOverride, elementFinder));
    seleneseMethods.put("clickAt", new ClickAt(alertOverride, elementFinder));
    seleneseMethods.put("check", new Check(alertOverride, elementFinder));
    seleneseMethods.put("chooseCancelOnNextConfirmation", new SetNextConfirmationState(false));
    seleneseMethods.put("chooseOkOnNextConfirmation", new SetNextConfirmationState(true));
    seleneseMethods.put("close", new Close());
    seleneseMethods.put("createCookie", new CreateCookie());
    seleneseMethods.put("controlKeyDown", new ControlKeyDown(keyState));
    seleneseMethods.put("controlKeyUp", new ControlKeyUp(keyState));
    seleneseMethods.put("deleteAllVisibleCookies", new DeleteAllVisibleCookies());
    seleneseMethods.put("deleteCookie", new DeleteCookie());
    seleneseMethods.put("deselectPopUp", new DeselectPopUp(windows));
    seleneseMethods.put("doubleClick", new DoubleClick(alertOverride, elementFinder));
    // "dragdrop" is the legacy alias for "dragAndDrop".
    seleneseMethods.put("dragdrop", new DragAndDrop(elementFinder));
    seleneseMethods.put("dragAndDrop", new DragAndDrop(elementFinder));
    seleneseMethods.put("dragAndDropToObject", new DragAndDropToObject(elementFinder));
    seleneseMethods.put("fireEvent", new FireEvent(elementFinder, javascriptLibrary));
    seleneseMethods.put("focus", new FireNamedEvent(elementFinder, javascriptLibrary, "focus"));
    seleneseMethods.put("getAlert", new GetAlert(alertOverride));
    seleneseMethods.put("getAllButtons", new GetAllButtons());
    seleneseMethods.put("getAllFields", new GetAllFields());
    seleneseMethods.put("getAllLinks", new GetAllLinks());
    seleneseMethods.put("getAllWindowNames", new GetAllWindowNames());
    seleneseMethods.put("getAllWindowTitles", new GetAllWindowTitles());
    seleneseMethods.put("getAttribute", new GetAttribute(javascriptLibrary, elementFinder));
    seleneseMethods.put("getAttributeFromAllWindows", new GetAttributeFromAllWindows());
    seleneseMethods.put("getBodyText", new GetBodyText());
    seleneseMethods.put("getConfirmation", new GetConfirmation(alertOverride));
    seleneseMethods.put("getCookie", new GetCookie());
    seleneseMethods.put("getCookieByName", new GetCookieByName());
    seleneseMethods.put("getElementHeight", new GetElementHeight(elementFinder));
    seleneseMethods.put("getElementIndex", new GetElementIndex(elementFinder, javascriptLibrary));
    seleneseMethods.put("getElementPositionLeft", new GetElementPositionLeft(elementFinder));
    seleneseMethods.put("getElementPositionTop", new GetElementPositionTop(elementFinder));
    seleneseMethods.put("getElementWidth", new GetElementWidth(elementFinder));
    seleneseMethods.put("getEval", new GetEval(scriptMutator));
    seleneseMethods.put("getExpression", new GetExpression());
    seleneseMethods.put("getHtmlSource", new GetHtmlSource());
    seleneseMethods.put("getLocation", new GetLocation());
    // The getSelected* family differ only in which option property they read.
    seleneseMethods.put("getSelectedId", new FindFirstSelectedOptionProperty(javascriptLibrary, elementFinder, "id"));
    seleneseMethods.put("getSelectedIds", new FindSelectedOptionProperties(javascriptLibrary, elementFinder, "id"));
    seleneseMethods.put("getSelectedIndex", new FindFirstSelectedOptionProperty(javascriptLibrary, elementFinder, "index"));
    seleneseMethods.put("getSelectedIndexes", new FindSelectedOptionProperties(javascriptLibrary, elementFinder, "index"));
    seleneseMethods.put("getSelectedLabel", new FindFirstSelectedOptionProperty(javascriptLibrary, elementFinder, "text"));
    seleneseMethods.put("getSelectedLabels", new FindSelectedOptionProperties(javascriptLibrary, elementFinder, "text"));
    seleneseMethods.put("getSelectedValue", new FindFirstSelectedOptionProperty(javascriptLibrary, elementFinder, "value"));
    seleneseMethods.put("getSelectedValues", new FindSelectedOptionProperties(javascriptLibrary, elementFinder, "value"));
    seleneseMethods.put("getSelectOptions", new GetSelectOptions(javascriptLibrary, elementFinder));
    // Speed emulation is a no-op under WebDriver; always reports "0".
    seleneseMethods.put("getSpeed", new NoOp("0"));
    seleneseMethods.put("getTable", new GetTable(elementFinder, javascriptLibrary));
    seleneseMethods.put("getText", new GetText(javascriptLibrary, elementFinder));
    seleneseMethods.put("getTitle", new GetTitle());
    seleneseMethods.put("getValue", new GetValue(elementFinder));
    seleneseMethods.put("getXpathCount", new GetXpathCount());
    seleneseMethods.put("getCssCount", new GetCssCount());
    seleneseMethods.put("goBack", new GoBack());
    seleneseMethods.put("highlight", new Highlight(elementFinder, javascriptLibrary));
    seleneseMethods.put("isAlertPresent", new IsAlertPresent(alertOverride));
    seleneseMethods.put("isChecked", new IsChecked(elementFinder));
    seleneseMethods.put("isConfirmationPresent", new IsConfirmationPresent(alertOverride));
    seleneseMethods.put("isCookiePresent", new IsCookiePresent());
    seleneseMethods.put("isEditable", new IsEditable(elementFinder));
    seleneseMethods.put("isElementPresent", new IsElementPresent(elementFinder));
    seleneseMethods.put("isOrdered", new IsOrdered(elementFinder, javascriptLibrary));
    seleneseMethods.put("isSomethingSelected", new IsSomethingSelected(javascriptLibrary));
    seleneseMethods.put("isTextPresent", new IsTextPresent(javascriptLibrary));
    seleneseMethods.put("isVisible", new IsVisible(elementFinder));
    seleneseMethods.put("keyDown", new KeyEvent(elementFinder, javascriptLibrary, keyState, "doKeyDown"));
    seleneseMethods.put("keyDownNative", new KeyDownNative());
    seleneseMethods.put("keyPress", new TypeKeys(alertOverride, elementFinder));
    seleneseMethods.put("keyPressNative", new KeyPressNative());
    seleneseMethods.put("keyUp", new KeyEvent(elementFinder, javascriptLibrary, keyState, "doKeyUp"));
    seleneseMethods.put("keyUpNative", new KeyUpNative());
    seleneseMethods.put("metaKeyDown", new MetaKeyDown(keyState));
    seleneseMethods.put("metaKeyUp", new MetaKeyUp(keyState));
    seleneseMethods.put("mouseOver", new MouseEvent(elementFinder, javascriptLibrary, "mouseover"));
    seleneseMethods.put("mouseOut", new MouseEvent(elementFinder, javascriptLibrary, "mouseout"));
    seleneseMethods.put("mouseDown", new MouseEvent(elementFinder, javascriptLibrary, "mousedown"));
    seleneseMethods.put("mouseDownAt", new MouseEventAt(elementFinder, javascriptLibrary, "mousedown"));
    seleneseMethods.put("mouseMove", new MouseEvent(elementFinder, javascriptLibrary, "mousemove"));
    seleneseMethods.put("mouseMoveAt", new MouseEventAt(elementFinder, javascriptLibrary, "mousemove"));
    seleneseMethods.put("mouseUp", new MouseEvent(elementFinder, javascriptLibrary, "mouseup"));
    seleneseMethods.put("mouseUpAt", new MouseEventAt(elementFinder, javascriptLibrary, "mouseup"));
    seleneseMethods.put("open", new Open(baseUrl));
    seleneseMethods.put("openWindow", new OpenWindow(baseUrl, new GetEval(scriptMutator)));
    seleneseMethods.put("refresh", new Refresh());
    seleneseMethods.put("removeAllSelections", new RemoveAllSelections(elementFinder));
    seleneseMethods.put("removeSelection", new RemoveSelection(javascriptLibrary, elementFinder));
    seleneseMethods.put("runScript", new RunScript(scriptMutator));
    seleneseMethods.put("select", new SelectOption(alertOverride, javascriptLibrary, elementFinder));
    seleneseMethods.put("selectFrame", new SelectFrame(windows));
    seleneseMethods.put("selectPopUp", new SelectPopUp(windows));
    seleneseMethods.put("selectWindow", new SelectWindow(windows));
    // Commands that have no WebDriver equivalent are mapped to no-ops.
    seleneseMethods.put("setBrowserLogLevel", new NoOp(null));
    seleneseMethods.put("setContext", new NoOp(null));
    seleneseMethods.put("setSpeed", new NoOp(null));
    seleneseMethods.put("setTimeout", new SetTimeout(timer));
    seleneseMethods.put("shiftKeyDown", new ShiftKeyDown(keyState));
    seleneseMethods.put("shiftKeyUp", new ShiftKeyUp(keyState));
    seleneseMethods.put("submit", new Submit(alertOverride, elementFinder));
    seleneseMethods.put("type", new Type(alertOverride, javascriptLibrary, elementFinder, keyState));
    seleneseMethods.put("typeKeys", new TypeKeys(alertOverride, elementFinder));
    seleneseMethods.put("uncheck", new Uncheck(alertOverride, elementFinder));
    seleneseMethods.put("useXpathLibrary", new UseXPathLibrary());
    seleneseMethods.put("waitForCondition", new WaitForCondition(scriptMutator));
    seleneseMethods.put("waitForFrameToLoad", new NoOp(null));
    seleneseMethods.put("waitForPageToLoad", new WaitForPageToLoad());
    seleneseMethods.put("waitForPopUp", new WaitForPopup(windows));
    seleneseMethods.put("windowFocus", new WindowFocus(javascriptLibrary));
    seleneseMethods.put("windowMaximize", new WindowMaximize(javascriptLibrary));
  }
}
package com.miscitems.MiscItemsAndBlocks.TileEntity.Electric; import MiscItemsApi.Recipes.MetalPressRecipe; import MiscItemsApi.Recipes.RecipeHandler; import MiscUtils.Network.PacketHandler; import com.miscitems.MiscItemsAndBlocks.Main.Main; import com.miscitems.MiscItemsAndBlocks.Network.Client.ClientMetalPressPacketUpdate; import cpw.mods.fml.common.network.NetworkRegistry; import net.minecraft.inventory.ISidedInventory; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; public class TileEntityMetalPress extends TileEntityPowerInv implements ISidedInventory{ public TileEntityMetalPress() { super(6, "Metal Press", 16); } public int WorkTime = 1; public int MaxWorkTime = 50; //1 = Mode 1x1 //2 = Mode 4x4 public int Mode = 1; private final int[] sidedSlotSides = new int[] { 1 }; private final int[] sidedSlotBottom = new int[] { 0 }; private final int[] sidedSlotTop = new int[] { 1 }; public int GetMode(){ return Mode; } public void SetMode(int i){ if(i == 1 || i == 2){ Mode = i; }else{ Mode = 1; } } public int GetWorkTime(){ return WorkTime; } public void SetWorkTime(int i){ if(i >= MaxWorkTime){ WorkTime = MaxWorkTime; }else{ WorkTime = i; } } public void updateEntity(){ if(!this.worldObj.isRemote){ if(Mode == 1 ? 
GetPower() > 10 : GetPower() > 20){ if(WorkTime <= MaxWorkTime ){ if(Mode == 1){ ItemStack[] stack = new ItemStack[]{this.getStackInSlot(1)}; if(stack != null && stack.length > 0 && stack[0] != null){ MetalPressRecipe pres = RecipeHandler.GetMetalPressRecipe(stack, Mode); if(pres != null) { ItemStack finishItem_1 = pres.Item; if (finishItem_1 != null && this.getStackInSlot(0) == null || finishItem_1 != null && this.getStackInSlot(0) != null && this.getStackInSlot(0).getItem() == finishItem_1.getItem() && this.getStackInSlot(0).stackSize < this.getInventoryStackLimit()) { WorkTimeAdd(); } else if (this.getStackInSlot(1) == null && Mode == 1 || finishItem_1 == null && Mode == 1) { SetWorkTimeUpdate(0); } } } }else if (Mode == 2){ ItemStack[] stacks = new ItemStack[]{this.getStackInSlot(2), this.getStackInSlot(3), this.getStackInSlot(4), this.getStackInSlot(5)}; if(stacks != null && stacks.length > 0) { MetalPressRecipe pres = RecipeHandler.GetMetalPressRecipe(stacks, Mode); if (pres != null){ ItemStack finishItem_4 = pres.Item; if (finishItem_4 != null && this.getStackInSlot(0) == null || finishItem_4 != null && this.getStackInSlot(0) != null && this.getStackInSlot(0).getItem() == finishItem_4.getItem() && this.getStackInSlot(0).stackSize < this.getInventoryStackLimit()) { WorkTimeAdd(); } else if (this.getStackInSlot(2) == null && Mode == 2 || this.getStackInSlot(3) == null && Mode == 2 || this.getStackInSlot(4) == null && Mode == 2 || this.getStackInSlot(5) == null && Mode == 2 || finishItem_4 == null && Mode == 2) { SetWorkTimeUpdate(0); } } } } }else{ WorkTimeReset(); if(Mode == 1){ MetalPressRecipe pres = RecipeHandler.GetMetalPressRecipe(new ItemStack[]{this.getStackInSlot(1) }, Mode); if(pres != null) { ItemStack FinishItem = pres.Item; this.decrStackSize(1, 1); this.SetPower(this.GetPower() - 10); if (this.getStackInSlot(0) == null) { this.setInventorySlotContents(0, FinishItem); } else if (this.getStackInSlot(0) != null && this.getStackInSlot(0).getItem() == 
FinishItem.getItem() && this.getStackInSlot(0).getItemDamage() == FinishItem.getItemDamage()) { if (this.getStackInSlot(0).stackSize < this.getInventoryStackLimit()) { this.setInventorySlotContents(0, new ItemStack(this.getStackInSlot(0).getItem(), this.getStackInSlot(0).stackSize + 1, this.getStackInSlot(0).getItemDamage())); } } } }else if (Mode == 2) { this.SetPower(this.GetPower() - 20); MetalPressRecipe pres = RecipeHandler.GetMetalPressRecipe(new ItemStack[]{this.getStackInSlot(2), this.getStackInSlot(3), this.getStackInSlot(4), this.getStackInSlot(5)}, Mode); if (pres != null){ ItemStack finishItem_4 = pres.Item; this.decrStackSize(2, 1); this.decrStackSize(3, 1); this.decrStackSize(4, 1); this.decrStackSize(5, 1); if (this.getStackInSlot(0) == null) { this.setInventorySlotContents(0, finishItem_4); } else if (this.getStackInSlot(0) != null && this.getStackInSlot(0).getItem() == finishItem_4.getItem() && this.getStackInSlot(0).getItemDamage() == finishItem_4.getItemDamage()) { if (this.getStackInSlot(0).stackSize < this.getInventoryStackLimit()) { this.setInventorySlotContents(0, new ItemStack(this.getStackInSlot(0).getItem(), this.getStackInSlot(0).stackSize + 1, this.getStackInSlot(0).getItemDamage())); } } } } } } } } public void readFromNBT(NBTTagCompound NBT) { super.readFromNBT(NBT); WorkTime = NBT.getInteger("WorkTime"); Mode = NBT.getInteger("Mode"); } public void writeToNBT(NBTTagCompound NBT) { super.writeToNBT(NBT); NBT.setInteger("WorkTime", WorkTime); NBT.setInteger("Mode", Mode); } public void receiveButtonEvent(byte buttonId) { if(buttonId == 1){ if(Mode == 1) Mode = 2; else if (Mode == 2) Mode = 1; } } @Override public int[] getAccessibleSlotsFromSide(int var1) { if(Mode == 1) return var1 == 0 ? sidedSlotBottom : new int[]{ 1 }; else if (Mode == 2) return var1 == 0 ? sidedSlotBottom : new int[]{ 2, 3, 4, 5 }; return var1 == 0 ? sidedSlotBottom : (var1 == 1 ? 
sidedSlotTop : sidedSlotSides); } @Override public boolean canInsertItem(int i, ItemStack itemstack, int j) { return this.isItemValidForSlot(i, itemstack); } @Override public boolean canExtractItem(int i, ItemStack itemstack, int j) { return j != 0 || i != 1 ; } public void WorkTimeAdd(){ WorkTime++; PacketHandler.sendToAllAround(new ClientMetalPressPacketUpdate(this.xCoord, this.yCoord, this.zCoord, WorkTime, false), new NetworkRegistry.TargetPoint(this.worldObj.getWorldInfo().getVanillaDimension(), xCoord, yCoord, zCoord, 100), Main.Utils.channels); } public void WorkTimeReset(){ WorkTime = 0; PacketHandler.sendToAllAround(new ClientMetalPressPacketUpdate(this.xCoord, this.yCoord, this.zCoord, WorkTime, true), new NetworkRegistry.TargetPoint(this.worldObj.getWorldInfo().getVanillaDimension(), xCoord, yCoord, zCoord, 100), Main.Utils.channels); } @Override public boolean CanAcceptPower() { return true; } public void SetWorkTimeUpdate(int i){ WorkTime = i; PacketHandler.sendToAllAround(new ClientMetalPressPacketUpdate(this.xCoord, this.yCoord, this.zCoord, WorkTime, false), new NetworkRegistry.TargetPoint(this.worldObj.getWorldInfo().getVanillaDimension(), xCoord, yCoord, zCoord, 100), Main.Utils.channels); } @Override public double GetMaxPower() { return 1000; } }
package mil.darpa.vande.generic; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This object encapsulates the specification for a graph including the initial * search terms. * * @author PWG for DARPA */ @Deprecated public class V_GraphQuery { private static final Logger logger = LoggerFactory.getLogger(V_GraphQuery.class); private boolean directed = true; private Long endTime = null; private String id = null; // Used for logging and persistence purposes private int maxEdgesPerNode = 50; private int maxHops = 3; private int maxNodes = 200; private int minLinks = 1; private int minTransValue = 0; private Set<String> searchIds = new HashSet<String>(); private List<String> errors = new ArrayList<String>(); private Long startTime = null; private final long timeInitiated = new Date().getTime(); /* * This was added to because legacy graph builders used it as a separate * query parameter. --djue. It is usually something like customer or account */ private String type = null; private String userId = "unknown"; private String username = "unknown"; public V_GraphQuery() { } /** * @param searchIds * a Set of initial terms to search for. Must not be empty * @param startTime * Long - If non-null, ignore items prior to this time * @param endTime * Long - If non-null, ignore items prior to this time * @param minTransValue * - if non-null, ignore line items with a value of less than * this amount * @param minEdgeValue * - if non-null, ignore edges with a value of less than this * amount * @param minPairValue * - if non-null, ignore edges where the total value of edges * between the parties is less than this amount * @param maxEdgesPerNode * int. If a node has more than the specified number of edges, * return it tagged as a placeholder. * @param maxNodes * do not return a graph with more than this number of nodes. 
If * the number of nodes exceeds this number. reduce the degree * until a graph with fewer nodes is reached. If that is not * possible, return an error. * @param directed * boolean. If false, treat an edge from A to B and an edge from * B to A as the same edge. * @param minLinks * int do not show an edge betweem two nodes if there were fewer * than this number of interactions. * @param maxDegrees * int do not show more than this number of degrees. * **/ public V_GraphQuery(final Set<String> searchIds, final Long startTime, final Long endTime, final int minTransValue, final int maxEdgesPerNode, final int maxNodes, final boolean directed, final int minLinks, final int maxHops) { this.searchIds = searchIds; this.startTime = startTime; this.endTime = endTime; this.minTransValue = minTransValue; this.maxEdgesPerNode = maxEdgesPerNode; this.maxNodes = maxNodes; this.directed = directed; this.minLinks = minLinks; this.maxHops = maxHops; } public V_GraphQuery(final V_GraphQuery q) { directed = q.directed; endTime = q.endTime; maxEdgesPerNode = q.maxEdgesPerNode; maxHops = q.maxHops; maxNodes = q.maxNodes; minLinks = q.minLinks; searchIds = q.searchIds; startTime = q.startTime; type = q.type; } public void addSearchIds(final String... 
id) { if ((id == null) || (id.length == 0)) { logger.warn("null or empty id provided: " + Arrays.toString(id)); } else { for (final String x : id) { searchIds.add(x); } } } public Long getEndTime() { return endTime; } /* * * * * * * * * * * * * * * * * */ /* GETTERS */ /* * * * * * * * * * * * * * * * * */ /** * @return the errors */ public List<String> getErrors() { return errors; } public String getId() { return id; } public int getMaxEdgesPerNode() { return maxEdgesPerNode; } public int getMaxHops() { return maxHops; } public int getMaxNodes() { return maxNodes; } public int getMinLinks() { return minLinks; } public int getMinTransValue() { return minTransValue; } public Set<String> getSearchIds() { return searchIds; } public Long getStartTime() { return startTime; } public long getTimeInitiated() { return timeInitiated; } public String getType() { return type; } public String getUserId() { return userId; } public String getUsername() { return username; } public boolean isDirected() { return directed; } /* * * * * * * * * * * * * * * * * */ /* SETTERS */ /* * * * * * * * * * * * * * * * * */ public void setDirected(final boolean directed) { this.directed = directed; } public void setEndTime(final Long endTime) { this.endTime = endTime; } /** * @param errors * the errors to set */ public void setErrors(final List<String> errors) { this.errors = errors; } public void setId(final String id) { this.id = id; } public void setMaxEdgesPerNode(final int maxEdgesPerNode) { this.maxEdgesPerNode = maxEdgesPerNode; } public void setMaxHops(final int maxHops) { this.maxHops = maxHops; } public void setMaxNodes(final int maxNodes) { this.maxNodes = maxNodes; } public void setMinLinks(final int minLinks) { this.minLinks = minLinks; } public void setMinTransValue(final int minValue) { minTransValue = minValue; } public void setSearchIds(final Set<String> searchIds) { this.searchIds = searchIds; } public void setStartTime(final Long startTime) { this.startTime = startTime; } 
public void setType(final String type) { this.type = type; } public void setUserId(final String userId) { this.userId = userId; } public void setUsername(final String username) { this.username = username; } /* * (non-Javadoc) * * @see java.lang.Object#toString() */ @Override public String toString() { return "V_GraphQuery [directed=" + directed + ", endTime=" + endTime + ", id=" + id + ", maxEdgesPerNode=" + maxEdgesPerNode + ", maxHops=" + maxHops + ", maxNodes=" + maxNodes + ", minLinks=" + minLinks + ", username=" + username + ", userId=" + userId + ", minTransValue=" + minTransValue + ", timeInitiated=" + timeInitiated + ", searchIds=" + searchIds + ", startTime=" + startTime + ", type=" + type + "]"; } }