gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package pt.ua.code.favouritetv.gui;

import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;

import pt.ua.code.favouritetv.R;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.BitmapFactory.Options;
import android.os.Handler;
import android.os.Message;
import android.util.Log;

/**
 * Singleton that decodes channel-logo bitmaps from app resources on a small
 * round-robin pool of background threads and caches the results behind
 * {@link SoftReference}s so the VM may reclaim them under memory pressure.
 *
 * Fixes applied in this revision:
 * - static cache is now initialized at its declaration (it used to be assigned
 *   inside the instance constructor, which only worked because of the eager
 *   singleton);
 * - shared collections are now thread-safe ({@link ConcurrentHashMap} /
 *   synchronized list) — loader threads and the UI thread both touch them;
 * - the override/call of the long-deprecated (removed in Java 11)
 *   {@code Thread.destroy()} is replaced by a private {@code cancel()} hook;
 * - interrupted waits re-assert the thread's interrupt status;
 * - resource-name normalization uses {@link Locale#ROOT} so it cannot break
 *   under locale-sensitive case mapping (e.g. Turkish dotless-i);
 * - {@link #stopLoadingImages()} scans every pool slot, not just the slots
 *   below the round-robin cursor (which missed live threads after wrap-around).
 */
public class AsyncImageLoader {

    /** URL -> softly-held bitmap. Concurrent: written by loaders, read by UI. */
    private static final Map<String, SoftReference<Bitmap>> imageCache =
            new ConcurrentHashMap<String, SoftReference<Bitmap>>();

    private static final AsyncImageLoader instance = new AsyncImageLoader();

    /** Fixed pool of loader slots, reused round-robin. */
    private static final LoaderThread[] controler = new LoaderThread[8];

    /**
     * Next pool slot to use. NOTE(review): assumed to be mutated only from the
     * UI thread (no synchronization) — confirm against callers.
     */
    private static int threadCount = 0;

    /** URLs whose load is currently in flight; accessed from several threads. */
    private static final List<String> receivingUrlsImage =
            Collections.synchronizedList(new LinkedList<String>());

    /**
     * Loader thread with an explicit cancel hook. Replaces the original
     * override of {@code Thread.destroy()}, which no longer exists on modern
     * JDKs; {@link #cancel()} only clears the in-flight marker for the URL.
     */
    private abstract static class LoaderThread extends Thread {
        abstract void cancel();
    }

    private AsyncImageLoader() {
        // All state is static and initialized at declaration; nothing to do.
    }

    /** @return the process-wide loader instance. */
    public static final AsyncImageLoader getInstance() {
        return instance;
    }

    /** @return true while a load for {@code imageUrl} is still in flight. */
    public boolean isReceivingImage(String imageUrl) {
        return receivingUrlsImage.contains(imageUrl);
    }

    /**
     * Looks up a cached bitmap.
     *
     * @return the cached bitmap, or {@code null} when absent or already
     *         reclaimed by the garbage collector.
     */
    public static Bitmap getImageInCache(String imageUrl) {
        // Single get() instead of containsKey()+get(): avoids the
        // check-then-act race the original had on a shared map.
        SoftReference<Bitmap> ref = imageCache.get(imageUrl);
        return ref != null ? ref.get() : null;
    }

    /**
     * Returns the bitmap immediately on a cache hit; otherwise schedules a
     * background decode and returns {@code null} — the result is delivered
     * later on the caller's thread via {@code imageCallback}.
     *
     * @param context       used to resolve app resources
     * @param imageUrl      cache key, also echoed back to the callback
     * @param res           channel identifier used to build the drawable name
     * @param imageCallback receiver for the asynchronously loaded bitmap
     */
    public Bitmap loadDrawableFromResources(final Context context, final String imageUrl,
            final String res, final ImageCallback imageCallback) {
        Bitmap cached = getImageInCache(imageUrl);
        if (cached != null) {
            return cached;
        }
        receivingUrlsImage.add(imageUrl);
        // Handler is created on the calling (UI) thread, so the callback and
        // the in-flight-list removal both run there.
        final Handler handler = new Handler() {
            @Override
            public void handleMessage(Message message) {
                imageCallback.imageLoaded((Bitmap) message.obj, imageUrl);
                receivingUrlsImage.remove(imageUrl);
            }
        };
        // Drain the previous occupant of this slot before reusing it.
        if (controler[threadCount] != null) {
            try {
                controler[threadCount].join();
            } catch (InterruptedException e) {
                // Preserve interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }
            if (controler[threadCount] != null) {
                controler[threadCount].cancel();
            }
            controler[threadCount] = null;
        }
        controler[threadCount] = new LoaderThread() {
            private String localUrl;

            @Override
            public void run() {
                localUrl = imageUrl;
                Bitmap drawable = loadImageFromResources(context, res);
                imageCache.put(imageUrl, new SoftReference<Bitmap>(drawable));
                handler.sendMessage(handler.obtainMessage(0, drawable));
            }

            @Override
            void cancel() {
                // Slot recycled: drop the in-flight marker for this URL.
                receivingUrlsImage.remove(localUrl);
            }
        };
        controler[threadCount].setPriority(Thread.MIN_PRIORITY);
        controler[threadCount].start();
        threadCount = (threadCount + 1) % controler.length;
        return null;
    }

    /**
     * Best-effort eviction of one cache entry, used when decoding runs out of
     * memory. Bitmaps are deliberately NOT recycled — other views may still be
     * drawing them; dropping the map entry lets the SoftReference clear.
     */
    private static void dropOneCacheEntry() {
        try {
            Iterator<Entry<String, SoftReference<Bitmap>>> it = imageCache.entrySet().iterator();
            if (it.hasNext()) {
                Entry<String, SoftReference<Bitmap>> entry = it.next();
                Reference<Bitmap> ref = entry.getValue();
                if (ref != null && ref.get() != null) {
                    Log.i("Recycling", "One bitmap Recycled");
                }
                imageCache.remove(entry.getKey());
            }
        } catch (ConcurrentModificationException e) {
            // Eviction is best-effort; losing one round is acceptable.
        }
    }

    /**
     * Decodes {@code channel_<resource>} from the app's drawables, falling back
     * to {@code channel_demo} when the name is unknown. On {@link OutOfMemoryError}
     * it evicts up to six cache entries, nudges the GC, and returns {@code null}.
     */
    public static Bitmap loadImageFromResources(Context context, String resource) {
        Bitmap ret = null;
        Options opts = new Options();
        opts.inPurgeable = true;
        opts.inPreferredConfig = Config.ARGB_8888;
        Resources res = context.getResources();
        try {
            ret = BitmapFactory.decodeResource(res,
                    res.getIdentifier("channel_" + formatResouceName(resource),
                            "drawable", context.getPackageName()), opts);
            if (ret == null) {
                ret = BitmapFactory.decodeResource(res, R.drawable.channel_demo, opts);
            }
        } catch (OutOfMemoryError error) {
            for (int i = 0; i < 6; i++) {
                Log.i("Recycling", "One bitmap Recycled because of memory");
                dropOneCacheEntry();
            }
            System.gc();
            try {
                Thread.sleep(200); // give the GC a moment before the caller retries
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        } catch (Resources.NotFoundException e) {
            ret = BitmapFactory.decodeResource(res, R.drawable.channel_demo, opts);
        }
        return ret;
    }

    /**
     * Keeps only {@code [a-zA-Z0-9_.]} from {@code sigla} and lower-cases it
     * with {@link Locale#ROOT} so resource names are stable across locales.
     */
    private static String formatResouceName(String sigla) {
        StringBuilder result = new StringBuilder(sigla.length());
        for (int i = 0; i < sigla.length(); i++) {
            char c = sigla.charAt(i);
            if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                    || (c >= '0' && c <= '9') || c == '_' || c == '.') {
                result.append(c);
            }
        }
        return result.toString().toLowerCase(Locale.ROOT);
    }

    /**
     * Interrupts every live loader thread and releases all pool slots.
     * Scans the whole array: the round-robin cursor wraps, so slots at or
     * above {@code threadCount} may also hold live threads.
     */
    public final void stopLoadingImages() {
        for (int i = 0; i < controler.length; i++) {
            if (controler[i] != null) {
                controler[i].interrupt();
                controler[i] = null;
            }
        }
        System.gc();
    }

    /** Receiver for bitmaps that finished loading asynchronously. */
    public interface ImageCallback {
        public void imageLoaded(Bitmap imageDrawable, String imageTag);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package network.aika.neuron.activation;

import network.aika.Config;
import network.aika.Thought;
import network.aika.fields.*;
import network.aika.neuron.Range;
import network.aika.neuron.Synapse;
import network.aika.sign.Sign;
import network.aika.steps.link.Cleanup;
import network.aika.steps.link.LinkCounting;
import network.aika.steps.link.PropagateBindingSignal;

import java.util.Comparator;

import static network.aika.fields.ConstantDoubleField.ZERO;
import static network.aika.fields.FieldUtils.*;
import static network.aika.neuron.activation.Timestamp.NOT_SET_AFTER;

/**
 * A directed edge in the activation graph: connects an input {@link Activation}
 * to an output {@link Activation} through a {@link Synapse}. On construction it
 * wires up the field graph (weighted input, and — when training is enabled —
 * the information-gain and back-propagation gradient fields) and registers the
 * link with the surrounding {@link Thought}.
 *
 * @author Lukas Molzberger
 */
public class Link<S extends Synapse, I extends Activation, O extends Activation> extends Element<Link> {

    // Orders links by output activation first, then by input activation.
    public static final Comparator<Link> COMPARE = Comparator.
            <Link, Activation<?>>comparing(l -> l.output)
            .thenComparing(l -> l.input);

    // Mutable: induce() replaces a template synapse with a concrete instance.
    protected S synapse;

    // May be null for "latent" links that have no input activation yet.
    protected final I input;
    protected final O output;

    // Gradient of the information gain; only built when training is enabled.
    private BiFunction igGradient;

    // Field computing iAct.value * synapse.weight, feeding the output's net.
    private AbstractBiFunction weightedInput;

    // Field computing oAct.outputGradient * synapse.weight.
    private AbstractBiFunction backPropGradient;

    // NOTE(review): assigned in initWeightInput() but never read in this class —
    // presumably the threshold field registers itself as a side effect; confirm.
    private ThresholdOperator onVisible;

    /**
     * Builds the link, wires the field graph, and notifies the Thought.
     * Training-related fields are only created when training is enabled and
     * the synapse is not negative.
     */
    public Link(S s, I input, O output) {
        this.synapse = s;
        this.input = input;
        this.output = output;

        init();

        if(input != null && output != null) {
            initWeightInput();

            if (getConfig().isTrainingEnabled() && !isNegative()) {
                // Information gain as a function of the input and output nets;
                // the result feeds the output's gradient input fields.
                igGradient = func(
                        "Information-Gain",
                        input.net,
                        output.net,
                        (x1, x2) ->
                                getRelativeSurprisal(
                                        Sign.getSign(x1),
                                        Sign.getSign(x2),
                                        input.getAbsoluteRange()
                                ),
                        output.getGradientInputFields()
                );

                backPropGradient = mulUnregistered("oAct.og * s.weight", output.outputGradient, getWeightOutput());
            }
        }

        getThought().onLinkCreationEvent(this);
    }

    /**
     * Creates the weighted-input field (iAct.value * weight -> output net) and
     * the visibility threshold on the synapse weight.
     */
    protected void initWeightInput() {
        onVisible = threshold("onVisible", 0.0, getWeightOutput());

        weightedInput = mulUnregistered(
                "iAct.value * s.weight",
                input.getValue(),
                getWeightOutput(),
                getOutput().getNet()
        );
    }

    /**
     * Registers the link with both endpoints and queues the bookkeeping steps
     * (binding-signal propagation, frequency counting).
     */
    public void init() {
        if(getInput() != null) {
            linkInput();
            PropagateBindingSignal.add(this);
        }

        if(getOutput() != null)
            linkOutput();

        if(getConfig().isCountingEnabled())
            LinkCounting.add(this);
    }

    /** @return the information-gain gradient field, or null when not training. */
    public DoubleFieldOutput getInformationGainGradient() {
        return igGradient;
    }

    /** The synapse weight viewed as a writable field. */
    public DoubleFieldInput getWeightInput() {
        return synapse.getWeight();
    }

    /** The same weight field viewed as a readable field (same object as above). */
    public DoubleFieldOutput getWeightOutput() {
        return synapse.getWeight();
    }

    public AbstractBiFunction getWeightedInput() {
        return weightedInput;
    }

    public DoubleFieldOutput getBackPropGradient() {
        return backPropGradient;
    }

    /**
     * Applies a gradient step to the synapse weight. A template link is first
     * materialized via induce(); if the weight transitions away from zero and
     * the input has fired, binding signals are (re-)propagated.
     *
     * @param g the raw gradient; scaled by the configured learn rate
     */
    public void updateWeight(double g) {
        double weightDelta = getConfig().getLearnRate() * g;

        boolean oldWeightIsZero = synapse.isZero();

        if(isTemplate())
            induce();

        synapse.updateWeight(this, weightDelta);

        if (oldWeightIsZero && !synapse.isZero() && getInput().isFired())
            PropagateBindingSignal.add(this);
    }

    // NOTE(review): triggerUpdate(1) vs (2) appears to select an update round /
    // phase in the fields framework — confirm against AbstractBiFunction.
    public void backPropagate() {
        if(backPropGradient != null)
            backPropGradient.triggerUpdate(1);
    }

    /** Re-evaluates the weight-dependent fields after a weight change. */
    public void receiveWeightUpdate() {
        weightedInput.triggerUpdate(2);

        if(backPropGradient != null)
            backPropGradient.triggerUpdate(2);
    }

    /** Fired timestamp of the causally earlier endpoint. */
    @Override
    public Timestamp getFired() {
        return isCausal() ? input.getFired() : output.getFired();
    }

    /** True if oAct already has a link from exactly this input activation. */
    public static boolean linkExists(Activation iAct, Activation oAct) {
        Link existingLink = oAct.getInputLink(iAct.getNeuron());
        return existingLink != null && existingLink.getInput() == iAct;
    }

    /** True if oAct has a link from iAct's neuron whose synapse matches template ts. */
    public static boolean templateLinkExists(Synapse ts, Activation iAct, Activation oAct) {
        Link l = oAct.getInputLink(iAct.getNeuron());
        if(l == null)
            return false;
        return l.getSynapse().isOfTemplate(ts);
    }

    /**
     * Surprisal of the joint (synapse) event relative to the two marginal
     * (neuron) events, for the given sign combination and range.
     */
    public double getRelativeSurprisal(Sign si, Sign so, Range range) {
        double s = synapse.getSurprisal(si, so, range, true);
        s -= input.getNeuron().getSurprisal(si, range, true);
        s -= output.getNeuron().getSurprisal(so, range, true);
        return s;
    }

    /** Sign-adjusted input value; falls back to ZERO for latent links. */
    public DoubleFieldOutput getInputValue(Sign s) {
        return s.getValue(input != null ? input.getValue() : ZERO);
    }

    public S getSynapse() {
        return synapse;
    }

    public void setSynapse(S synapse) {
        this.synapse = synapse;
    }

    public I getInput() {
        return input;
    }

    public O getOutput() {
        return output;
    }

    public boolean isSelfRef() {
        return output.isSelfRef(input);
    }

    public boolean isTemplate() {
        return getSynapse().isTemplate();
    }

    public boolean isRecurrent() {
        return synapse.isRecurrent();
    }

    /** A link with no input is treated as causal by convention. */
    public boolean isCausal() {
        return input == null || isCausal(input, output);
    }

    /** iAct is causal for oAct when it fired strictly earlier (NOT_SET sorts last). */
    public static boolean isCausal(Activation iAct, Activation oAct) {
        return NOT_SET_AFTER.compare(iAct.getFired(), oAct.getFired()) < 0;
    }

    /**
     * Materializes a template link: instantiates the template synapse for the
     * concrete input/output neurons and schedules cleanup of this link.
     */
    public void induce() {
        assert isTemplate();

        if(output.isTemplate())
            output.induce();

        synapse = (S) synapse
                .instantiateTemplate(
                        input.getNeuron(),
                        output.getNeuron()
                );
        synapse.linkOutput();

        Cleanup.add(this);
    }

    /** Registers this link in the input activation's outgoing-link map. */
    public void linkInput() {
        if(input == null)
            return;

        input.outputLinks.put(
                new OutputKey(output.getNeuronProvider(), output.getId()),
                this
        );
    }

    // Intentionally empty; subclasses may hook the final-mode transition.
    public void setFinalMode() {
    }

    /** Registers this link in the output activation's incoming-link map,
     *  keyed by the input neuron (from the synapse for latent links). */
    public void linkOutput() {
        output.inputLinks.put(input != null ? input.getNeuronProvider() : synapse.getPInput(), this);
    }

    public void unlinkInput() {
        OutputKey ok = output.getOutputKey();
        input.outputLinks.remove(ok, this);
    }

    public void unlinkOutput() {
        output.inputLinks.remove(input.getNeuronProvider(), this);
    }

    public boolean isNegative() {
        return synapse.isNegative();
    }

    @Override
    public Thought getThought() {
        return output.getThought();
    }

    @Override
    public Config getConfig() {
        return output.getConfig();
    }

    @Override
    public int compareTo(Link l) {
        return COMPARE.compare(this, l);
    }

    /** Debug rendering: "[Template-]Link in:[...] --> out:[...]". */
    public String toString() {
        return (isTemplate() ? "Template-" : "") +
                getClass().getSimpleName() +
                " in:[" + getInputKeyString() + "] " +
                "--> " +
                "out:[" + getOutputKeyString() + "]";
    }

    private String getInputKeyString() {
        return (input != null ? input.toKeyString() : "id:X n:[" + synapse.getInput() + "]");
    }

    private String getOutputKeyString() {
        return (output != null ? output.toKeyString() : "id:X n:[" + synapse.getOutput() + "]");
    }
}
/*
 * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: StepPattern.java,v 1.2.4.1 2005/09/12 11:13:19 pvedula Exp $
 */

package com.sun.org.apache.xalan.internal.xsltc.compiler;

import java.util.Vector;

import com.sun.org.apache.bcel.internal.classfile.Field;
import com.sun.org.apache.bcel.internal.generic.ALOAD;
import com.sun.org.apache.bcel.internal.generic.ASTORE;
import com.sun.org.apache.bcel.internal.generic.BranchHandle;
import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen;
import com.sun.org.apache.bcel.internal.generic.GETFIELD;
import com.sun.org.apache.bcel.internal.generic.GOTO;
import com.sun.org.apache.bcel.internal.generic.GOTO_W;
import com.sun.org.apache.bcel.internal.generic.IFLT;
import com.sun.org.apache.bcel.internal.generic.IFNE;
import com.sun.org.apache.bcel.internal.generic.IFNONNULL;
import com.sun.org.apache.bcel.internal.generic.IF_ICMPEQ;
import com.sun.org.apache.bcel.internal.generic.IF_ICMPLT;
import com.sun.org.apache.bcel.internal.generic.IF_ICMPNE;
import com.sun.org.apache.bcel.internal.generic.ILOAD;
import com.sun.org.apache.bcel.internal.generic.INVOKEINTERFACE;
import com.sun.org.apache.bcel.internal.generic.INVOKESPECIAL;
import com.sun.org.apache.bcel.internal.generic.ISTORE;
import com.sun.org.apache.bcel.internal.generic.InstructionHandle;
import com.sun.org.apache.bcel.internal.generic.InstructionList;
import com.sun.org.apache.bcel.internal.generic.LocalVariableGen;
import com.sun.org.apache.bcel.internal.generic.NEW;
import com.sun.org.apache.bcel.internal.generic.PUSH;
import com.sun.org.apache.bcel.internal.generic.PUTFIELD;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ClassGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Type;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.TypeCheckError;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Util;
import com.sun.org.apache.xml.internal.dtm.Axis;
import com.sun.org.apache.xml.internal.dtm.DTM;

/**
 * A single step of an XSLT match pattern (axis + node test + optional
 * predicates). Type checking classifies the predicates into one of three
 * context cases (none / simple / general), and translation emits BCEL
 * bytecode that leaves the match decision on the true/false branch lists
 * inherited from the pattern base class.
 *
 * @author Jacek Ambroziak
 * @author Santiago Pericas-Geertsen
 * @author Erwin Bolwidt <ejb@klomp.org>
 */
class StepPattern extends RelativePathPattern {

    // Predicate classification: no predicate needs position()/last(),
    // exactly one positional predicate, or anything more complex.
    private static final int NO_CONTEXT = 0;
    private static final int SIMPLE_CONTEXT = 1;
    private static final int GENERAL_CONTEXT = 2;

    protected final int _axis;
    protected final int _nodeType;
    protected Vector _predicates;

    // Helper Step built during typeCheck() for the context cases that need
    // an iterator at run time.
    private Step    _step = null;
    // True once the kernel has been reduced away (wildcard-like matching).
    private boolean _isEpsilon = false;
    private int     _contextCase;

    // MAX_VALUE means "not explicitly set"; see getDefaultPriority().
    private double  _priority = Double.MAX_VALUE;

    /**
     * @param axis       DTM axis of this step
     * @param nodeType   expanded type id (or NodeTest constant) to match
     * @param predicates optional Vector of Predicate nodes, may be null
     */
    public StepPattern(int axis, int nodeType, Vector predicates) {
        _axis = axis;
        _nodeType = nodeType;
        _predicates = predicates;
    }

    /** Propagates the parser to the predicates and adopts them as children. */
    public void setParser(Parser parser) {
        super.setParser(parser);
        if (_predicates != null) {
            final int n = _predicates.size();
            for (int i = 0; i < n; i++) {
                final Predicate exp = (Predicate)_predicates.elementAt(i);
                exp.setParser(parser);
                exp.setParent(this);
            }
        }
    }

    public int getNodeType() {
        return _nodeType;
    }

    /** Overrides the XSLT default priority for this pattern. */
    public void setPriority(double priority) {
        _priority = priority;
    }

    /** A single step is its own kernel. */
    public StepPattern getKernelPattern() {
        return this;
    }

    /** Epsilon kernel with no predicates matches anything. */
    public boolean isWildcard() {
        return _isEpsilon && hasPredicates() == false;
    }

    /** Replaces the predicate list; returns this for chaining. */
    public StepPattern setPredicates(Vector predicates) {
        _predicates = predicates;
        return(this);
    }

    protected boolean hasPredicates() {
        return _predicates != null && _predicates.size() > 0;
    }

    /**
     * XSLT default priority: an explicit priority wins; any predicate gives
     * 0.5; otherwise node()/named tests follow the spec's -0.5 / 0.0 scheme.
     */
    public double getDefaultPriority() {
        if (_priority != Double.MAX_VALUE) {
            return _priority;
        }

        if (hasPredicates()) {
            return 0.5;
        }
        else {
            switch(_nodeType) {
            case -1:
                return -0.5;    // node()
            case 0:
                return 0.0;
            default:
                return (_nodeType >= NodeTest.GTYPE) ? 0.0 : -0.5;
            }
        }
    }

    public int getAxis() {
        return _axis;
    }

    /** Marks the kernel as consumed (matching reduced to predicates only). */
    public void reduceKernelPattern() {
        _isEpsilon = true;
    }

    /** Debug rendering, e.g. stepPattern("child", 3, [pred...]). */
    public String toString() {
        final StringBuffer buffer = new StringBuffer("stepPattern(\"");
    buffer.append(Axis.getNames(_axis))
            .append("\", ")
            .append(_isEpsilon ?
                        ("epsilon{" + Integer.toString(_nodeType) + "}") :
                         Integer.toString(_nodeType));
        if (_predicates != null)
            buffer.append(", ").append(_predicates.toString());
        return buffer.append(')').toString();
    }

    /**
     * Classifies the predicates: NO_CONTEXT when none of them touch
     * position()/last(), SIMPLE_CONTEXT for exactly one positional predicate,
     * GENERAL_CONTEXT otherwise.
     */
    private int analyzeCases() {
        boolean noContext = true;
        final int n = _predicates.size();

        for (int i = 0; i < n && noContext; i++) {
            Predicate pred = (Predicate) _predicates.elementAt(i);
            if (pred.isNthPositionFilter() ||
                pred.hasPositionCall() ||
                pred.hasLastCall())
            {
                noContext = false;
            }
        }

        if (noContext) {
            return NO_CONTEXT;
        }
        else if (n == 1) {
            return SIMPLE_CONTEXT;
        }
        return GENERAL_CONTEXT;
    }

    /** Fresh translet field name for caching this pattern's iterator. */
    private String getNextFieldName() {
        return  "__step_pattern_iter_" + getXSLTC().nextStepPatternSerial();
    }

    /**
     * Type-checks the predicates, decides the context case, and (for the
     * context-dependent cases) builds the helper Step used at translate time.
     * Returns Element or Attribute depending on the axis.
     */
    public Type typeCheck(SymbolTable stable) throws TypeCheckError {
        if (hasPredicates()) {
            // Type check all the predicates (e -> position() = e)
            final int n = _predicates.size();
            for (int i = 0; i < n; i++) {
                final Predicate pred = (Predicate)_predicates.elementAt(i);
                pred.typeCheck(stable);
            }

            // Analyze context cases
            _contextCase = analyzeCases();

            Step step = null;

            // Create an instance of Step to do the translation
            if (_contextCase == SIMPLE_CONTEXT) {
                Predicate pred = (Predicate)_predicates.elementAt(0);
                if (pred.isNthPositionFilter()) {
                    _contextCase = GENERAL_CONTEXT;
                    step = new Step(_axis, _nodeType, _predicates);
                } else {
                    step = new Step(_axis, _nodeType, null);
                }
            } else if (_contextCase == GENERAL_CONTEXT) {
                final int len = _predicates.size();
                for (int i = 0; i < len; i++) {
                    ((Predicate)_predicates.elementAt(i)).dontOptimize();
                }

                step = new Step(_axis, _nodeType, _predicates);
            }

            if (step != null) {
                step.setParser(getParser());
                step.typeCheck(stable);
                _step = step;
            }
        }
        return _axis == Axis.CHILD ? Type.Element : Type.Attribute;
    }

    /**
     * Emits the node-test for the node on top of the stack: an isElement /
     * isAttribute interface call, or an expanded-type-id comparison. Failures
     * jump to the false list via a wide GOTO (long-jump safe).
     */
    private void translateKernel(ClassGenerator classGen,
                                 MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        if (_nodeType == DTM.ELEMENT_NODE) {
            final int check = cpg.addInterfaceMethodref(DOM_INTF,
                                                        "isElement", "(I)Z");
            il.append(methodGen.loadDOM());
            il.append(SWAP);
            il.append(new INVOKEINTERFACE(check, 2));

            // Need to allow for long jumps here
            final BranchHandle icmp = il.append(new IFNE(null));
            _falseList.add(il.append(new GOTO_W(null)));
            icmp.setTarget(il.append(NOP));
        }
        else if (_nodeType == DTM.ATTRIBUTE_NODE) {
            final int check = cpg.addInterfaceMethodref(DOM_INTF,
                                                        "isAttribute", "(I)Z");
            il.append(methodGen.loadDOM());
            il.append(SWAP);
            il.append(new INVOKEINTERFACE(check, 2));

            // Need to allow for long jumps here
            final BranchHandle icmp = il.append(new IFNE(null));
            _falseList.add(il.append(new GOTO_W(null)));
            icmp.setTarget(il.append(NOP));
        }
        else {
            // context node is on the stack
            final int getEType = cpg.addInterfaceMethodref(DOM_INTF,
                                                          "getExpandedTypeID",
                                                          "(I)I");
            il.append(methodGen.loadDOM());
            il.append(SWAP);
            il.append(new INVOKEINTERFACE(getEType, 2));
            il.append(new PUSH(cpg, _nodeType));

            // Need to allow for long jumps here
            final BranchHandle icmp = il.append(new IF_ICMPEQ(null));
            _falseList.add(il.append(new GOTO_W(null)));
            icmp.setTarget(il.append(NOP));
        }
    }

    /**
     * NO_CONTEXT case: no predicate needs position()/last(), so the current
     * node is simply swapped in, the kernel checked, and each predicate
     * evaluated directly; current node is restored on both outcomes.
     */
    private void translateNoContext(ClassGenerator classGen,
                                    MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        // Push current node on the stack
        il.append(methodGen.loadCurrentNode());
        il.append(SWAP);

        // Overwrite current node with matching node
        il.append(methodGen.storeCurrentNode());

        // If pattern not reduced then check kernel
        if (!_isEpsilon) {
            il.append(methodGen.loadCurrentNode());
            translateKernel(classGen, methodGen);
        }

        // Compile the expressions within the predicates
        final int n = _predicates.size();
        for (int i = 0; i < n; i++) {
            Predicate pred = (Predicate)_predicates.elementAt(i);
            Expression exp = pred.getExpr();
            exp.translateDesynthesized(classGen, methodGen);
            _trueList.append(exp._trueList);
            _falseList.append(exp._falseList);
        }

        // Backpatch true list and restore current iterator/node
        InstructionHandle restore;
        restore = il.append(methodGen.storeCurrentNode());
        backPatchTrueList(restore);
        BranchHandle skipFalse = il.append(new GOTO(null));

        // Backpatch false list and restore current iterator/node
        restore = il.append(methodGen.storeCurrentNode());
        backPatchFalseList(restore);
        _falseList.add(il.append(new GOTO(null)));

        // True list falls through
        skipFalse.setTarget(il.append(NOP));
    }

    /**
     * SIMPLE_CONTEXT case: one positional predicate. Builds a MatchingIterator
     * over the matching node, starts it at the node's parent, and evaluates
     * the predicate against that context; iterator and current node are
     * restored on both the true and false paths.
     */
    private void translateSimpleContext(ClassGenerator classGen,
                                        MethodGenerator methodGen) {
        int index;
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        // Store matching node into a local variable
        LocalVariableGen match;
        match = methodGen.addLocalVariable("step_pattern_tmp1",
                                           Util.getJCRefType(NODE_SIG),
                                           null, null);
        match.setStart(il.append(new ISTORE(match.getIndex())));

        // If pattern not reduced then check kernel
        if (!_isEpsilon) {
            il.append(new ILOAD(match.getIndex()));
            translateKernel(classGen, methodGen);
        }

        // Push current iterator and current node on the stack
        il.append(methodGen.loadCurrentNode());
        il.append(methodGen.loadIterator());

        // Create a new matching iterator using the matching node
        index = cpg.addMethodref(MATCHING_ITERATOR, "<init>",
                                 "(I" + NODE_ITERATOR_SIG + ")V");

        // Backwards branches are prohibited if an uninitialized object is
        // on the stack by section 4.9.4 of the JVM Specification, 2nd Ed.
        // We don't know whether this code might contain backwards branches,
        // so we mustn't create the new object until after we've created
        // the suspect arguments to its constructor.  Instead we calculate
        // the values of the arguments to the constructor first, store them
        // in temporary variables, create the object and reload the
        // arguments from the temporaries to avoid the problem.

        _step.translate(classGen, methodGen);
        LocalVariableGen stepIteratorTemp =
                methodGen.addLocalVariable("step_pattern_tmp2",
                                           Util.getJCRefType(NODE_ITERATOR_SIG),
                                           null, null);
        stepIteratorTemp.setStart(
                il.append(new ASTORE(stepIteratorTemp.getIndex())));

        il.append(new NEW(cpg.addClass(MATCHING_ITERATOR)));
        il.append(DUP);
        il.append(new ILOAD(match.getIndex()));
        stepIteratorTemp.setEnd(
                il.append(new ALOAD(stepIteratorTemp.getIndex())));
        il.append(new INVOKESPECIAL(index));

        // Get the parent of the matching node
        il.append(methodGen.loadDOM());
        il.append(new ILOAD(match.getIndex()));
        index = cpg.addInterfaceMethodref(DOM_INTF, GET_PARENT, GET_PARENT_SIG);
        il.append(new INVOKEINTERFACE(index, 2));

        // Start the iterator with the parent
        il.append(methodGen.setStartNode());

        // Overwrite current iterator and current node
        il.append(methodGen.storeIterator());
        match.setEnd(il.append(new ILOAD(match.getIndex())));
        il.append(methodGen.storeCurrentNode());

        // Translate the expression of the predicate
        Predicate pred = (Predicate) _predicates.elementAt(0);
        Expression exp = pred.getExpr();
        exp.translateDesynthesized(classGen, methodGen);

        // Backpatch true list and restore current iterator/node
        InstructionHandle restore = il.append(methodGen.storeIterator());
        il.append(methodGen.storeCurrentNode());
        exp.backPatchTrueList(restore);
        BranchHandle skipFalse = il.append(new GOTO(null));

        // Backpatch false list and restore current iterator/node
        restore = il.append(methodGen.storeIterator());
        il.append(methodGen.storeCurrentNode());
        exp.backPatchFalseList(restore);
        _falseList.add(il.append(new GOTO(null)));

        // True list falls through
        skipFalse.setTarget(il.append(NOP));
    }

    /**
     * GENERAL_CONTEXT case: multiple/complex positional predicates. Caches the
     * step iterator in a translet field (when in the main class), starts it at
     * the parent of the candidate node, then emits an inline scan loop that
     * advances until it reaches or passes the candidate; match succeeds when
     * the iterator yields exactly that node.
     */
    private void translateGeneralContext(ClassGenerator classGen,
                                         MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        int iteratorIndex = 0;
        BranchHandle ifBlock = null;
        LocalVariableGen iter, node, node2;
        final String iteratorName = getNextFieldName();

        // Store node on the stack into a local variable
        node = methodGen.addLocalVariable("step_pattern_tmp1",
                                          Util.getJCRefType(NODE_SIG),
                                          null, null);
        node.setStart(il.append(new ISTORE(node.getIndex())));

        // Create a new local to store the iterator
        iter = methodGen.addLocalVariable("step_pattern_tmp2",
                                          Util.getJCRefType(NODE_ITERATOR_SIG),
                                          null, null);

        // Add a new private field if this is the main class
        if (!classGen.isExternal()) {
            final Field iterator =
                new Field(ACC_PRIVATE,
                          cpg.addUtf8(iteratorName),
                          cpg.addUtf8(NODE_ITERATOR_SIG),
                          null, cpg.getConstantPool());
            classGen.addField(iterator);
            iteratorIndex = cpg.addFieldref(classGen.getClassName(),
                                            iteratorName,
                                            NODE_ITERATOR_SIG);

            il.append(classGen.loadTranslet());
            il.append(new GETFIELD(iteratorIndex));
            il.append(DUP);
            iter.setStart(il.append(new ASTORE(iter.getIndex())));
            ifBlock = il.append(new IFNONNULL(null));
            il.append(classGen.loadTranslet());
        }

        // Compile the step created at type checking time
        _step.translate(classGen, methodGen);
        InstructionHandle iterStore = il.append(new ASTORE(iter.getIndex()));

        // If in the main class update the field too
        if (!classGen.isExternal()) {
            il.append(new ALOAD(iter.getIndex()));
            il.append(new PUTFIELD(iteratorIndex));
            ifBlock.setTarget(il.append(NOP));
        } else {
            // If class is not external, start of range for iter variable was
            // set above
            iter.setStart(iterStore);
        }

        // Get the parent of the node on the stack
        il.append(methodGen.loadDOM());
        il.append(new ILOAD(node.getIndex()));
        int index = cpg.addInterfaceMethodref(DOM_INTF,
                                              GET_PARENT, GET_PARENT_SIG);
        il.append(new INVOKEINTERFACE(index, 2));

        // Initialize the iterator with the parent
        il.append(new ALOAD(iter.getIndex()));
        il.append(SWAP);
        il.append(methodGen.setStartNode());

        /*
         * Inline loop:
         *
         * int node2;
         * while ((node2 = iter.next()) != NodeIterator.END
         *                && node2 < node);
         * return node2 == node;
         */
        BranchHandle skipNext;
        InstructionHandle begin, next;
        node2 = methodGen.addLocalVariable("step_pattern_tmp3",
                                           Util.getJCRefType(NODE_SIG),
                                           null, null);

        skipNext = il.append(new GOTO(null));
        next = il.append(new ALOAD(iter.getIndex()));
        node2.setStart(next);
        begin = il.append(methodGen.nextNode());
        il.append(DUP);
        il.append(new ISTORE(node2.getIndex()));
        _falseList.add(il.append(new IFLT(null)));      // NodeIterator.END

        il.append(new ILOAD(node2.getIndex()));
        il.append(new ILOAD(node.getIndex()));
        iter.setEnd(il.append(new IF_ICMPLT(next)));

        node2.setEnd(il.append(new ILOAD(node2.getIndex())));
        node.setEnd(il.append(new ILOAD(node.getIndex())));
        _falseList.add(il.append(new IF_ICMPNE(null)));

        skipNext.setTarget(begin);
    }

    /**
     * Dispatches to the context-case translators when predicates exist;
     * a wildcard just pops the candidate node (always matches); otherwise
     * only the kernel test is emitted.
     */
    public void translate(ClassGenerator classGen, MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        if (hasPredicates()) {
            switch (_contextCase) {
            case NO_CONTEXT:
                translateNoContext(classGen, methodGen);
                break;

            case SIMPLE_CONTEXT:
                translateSimpleContext(classGen, methodGen);
                break;

            default:
                translateGeneralContext(classGen, methodGen);
                break;
            }
        }
        else if (isWildcard()) {
            il.append(POP);     // true list falls through
        }
        else {
            translateKernel(classGen, methodGen);
        }
    }
}
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.pmml.models.regression.compiler.factories; import java.io.IOException; import java.util.AbstractMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; import com.github.javaparser.ast.CompilationUnit; import com.github.javaparser.ast.NodeList; import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.body.ConstructorDeclaration; import com.github.javaparser.ast.body.MethodDeclaration; import com.github.javaparser.ast.expr.AssignExpr; import com.github.javaparser.ast.expr.BooleanLiteralExpr; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.MethodCallExpr; import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.ast.expr.ObjectCreationExpr; import com.github.javaparser.ast.expr.SimpleName; import com.github.javaparser.ast.expr.StringLiteralExpr; import com.github.javaparser.ast.stmt.BlockStmt; import com.github.javaparser.ast.stmt.ReturnStmt; import org.dmg.pmml.OpType; import org.dmg.pmml.regression.RegressionModel; import org.dmg.pmml.regression.RegressionTable; import org.kie.pmml.commons.exceptions.KiePMMLInternalException; import org.kie.pmml.commons.model.KiePMMLOutputField; import org.kie.pmml.commons.model.enums.OP_TYPE; import 
org.kie.pmml.models.regression.model.enums.REGRESSION_NORMALIZATION_METHOD;
import org.kie.pmml.models.regression.model.tuples.KiePMMLTableSourceCategory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.kie.pmml.commons.Constants.MISSING_BODY_TEMPLATE;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.getFromFileName;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableRegressionFactory.addMethod;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableRegressionFactory.populateGetTargetCategory;

/**
 * Source-code factory for the "classification" variant of a PMML regression table.
 * <p>
 * Loads a JavaParser template ({@code KiePMMLRegressionTableClassificationTemplate.tmpl}),
 * clones it, renames it with a unique arity-based suffix, and fills in its constructor
 * and method bodies from the supplied {@link RegressionTable}s. The generated class
 * aggregates the per-category regression tables produced by
 * {@code KiePMMLRegressionTableRegressionFactory}.
 */
public class KiePMMLRegressionTableClassificationFactory {

    public static final String KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE_JAVA = "KiePMMLRegressionTableClassificationTemplate.tmpl";
    private static final Logger logger = LoggerFactory.getLogger(KiePMMLRegressionTableClassificationFactory.class.getName());
    private static final String MAIN_CLASS_NOT_FOUND = "Main class not found";
    private static final String KIE_PMML_GET_PROBABILITY_MAP_METHOD_TEMPLATE_JAVA = "KiePMMLGetProbabilityMapMethodTemplate.tmpl";
    private static final String KIE_PMML_GET_PROBABILITY_MAP_METHOD_TEMPLATE = "KiePMMLGetProbabilityMapMethodTemplate";
    private static final String KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE = "KiePMMLRegressionTableClassificationTemplate";
    // Monotonic counter used to give every generated class a unique name suffix.
    private static AtomicInteger classArity = new AtomicInteger(0);
    // NOTE(review): mutable static fields written on every call to
    // populateGetProbabilityMapMethod - shared across invocations and not
    // thread-safe if this factory is ever called concurrently. Verify whether
    // they need to be fields at all (they are only read inside that method).
    private static CompilationUnit templateEvaluate;
    private static CompilationUnit cloneEvaluate;

    private KiePMMLRegressionTableClassificationFactory() {
        // Avoid instantiation
    }

    /**
     * Generates the per-category regression-table sources plus the aggregating
     * classification table source.
     *
     * @param regressionTables the PMML regression tables to compile
     * @param normalizationMethod normalization applied by the classification wrapper
     *        (the nested tables are generated with {@code NONE})
     * @param opType optional PMML op type; may be {@code null}
     * @param outputFields output fields to materialize in the generated class
     * @param targetField name of the predicted field
     * @param packageName package of the generated sources
     * @return map of generated-class-name to its source/category pair, including
     *         the classification wrapper itself (with an empty category)
     * @throws IOException if a template file cannot be read
     */
    public static Map<String, KiePMMLTableSourceCategory> getRegressionTables(final List<RegressionTable> regressionTables,
                                                                              final RegressionModel.NormalizationMethod normalizationMethod,
                                                                              final OpType opType,
                                                                              final List<KiePMMLOutputField> outputFields,
                                                                              final String targetField,
                                                                              final String packageName) throws IOException {
        logger.trace("getRegressionTables {}", regressionTables);
        CompilationUnit templateCU = getFromFileName(KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE_JAVA);
        // Nested tables are always generated with NormalizationMethod.NONE: the
        // classification wrapper applies the real normalization afterwards.
        Map<String, KiePMMLTableSourceCategory> toReturn = KiePMMLRegressionTableRegressionFactory.getRegressionTables(regressionTables, RegressionModel.NormalizationMethod.NONE, targetField, packageName);
        Map.Entry<String, String> regressionTableEntry = getRegressionTable(templateCU, toReturn, normalizationMethod, opType, outputFields, targetField, packageName);
        // The wrapper itself has no category, hence the empty string.
        toReturn.put(regressionTableEntry.getKey(), new KiePMMLTableSourceCategory(regressionTableEntry.getValue(), ""));
        return toReturn;
    }

    /**
     * Builds the source of the aggregating classification table from the template.
     *
     * @param templateCU parsed template compilation unit (cloned, never mutated)
     * @param regressionTablesMap already-generated per-category tables, keyed by class name
     * @param normalizationMethod selects the {@code get<Method>ProbabilityMap} template body
     * @param opType optional PMML op type; may be {@code null}
     * @param outputFields output fields to materialize
     * @param targetField name of the predicted field
     * @param packageName package of the generated source
     * @return entry of generated class name to its full source text
     * @throws IOException declared for symmetry with template loading; see
     *         populateGetProbabilityMapMethod for the actual template read
     */
    public static Map.Entry<String, String> getRegressionTable(final CompilationUnit templateCU,
                                                               final Map<String, KiePMMLTableSourceCategory> regressionTablesMap,
                                                               final RegressionModel.NormalizationMethod normalizationMethod,
                                                               final OpType opType,
                                                               final List<KiePMMLOutputField> outputFields,
                                                               final String targetField,
                                                               final String packageName) throws IOException {
        logger.trace("getRegressionTable {}", regressionTablesMap);
        // Work on a clone so the template CU can be reused by the caller.
        CompilationUnit cloneCU = templateCU.clone();
        cloneCU.setPackageDeclaration(packageName);
        final REGRESSION_NORMALIZATION_METHOD regressionNormalizationMethod = REGRESSION_NORMALIZATION_METHOD.byName(normalizationMethod.value());
        final OP_TYPE op_type = opType != null ? OP_TYPE.byName(opType.value()) : null;
        ClassOrInterfaceDeclaration tableTemplate = cloneCU.getClassByName(KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE)
                .orElseThrow(() -> new RuntimeException(MAIN_CLASS_NOT_FOUND));
        // Unique name per generated class; the counter is never reset.
        String className = "KiePMMLRegressionTableClassification" + classArity.addAndGet(1);
        tableTemplate.setName(className);
        populateGetProbabilityMapMethod(normalizationMethod, tableTemplate);
        populateOutputFieldsMap(tableTemplate, outputFields);
        populateIsBinaryMethod(opType, regressionTablesMap.size(), tableTemplate);
        final ConstructorDeclaration constructorDeclaration = tableTemplate.getDefaultConstructor()
                .orElseThrow(() -> new KiePMMLInternalException(String.format("Missing default constructor in ClassOrInterfaceDeclaration %s ", tableTemplate.getName())));
        setConstructor(constructorDeclaration, tableTemplate.getName(), targetField, regressionNormalizationMethod, op_type);
        addMapPopulation(constructorDeclaration.getBody(), regressionTablesMap);
        populateGetTargetCategory(tableTemplate, null);
        return new AbstractMap.SimpleEntry<>(className, cloneCU.toString());
    }

    /**
     * Set the <b>targetField</b>, <b>regressionNormalizationMethod</b> and
     * <b>opType</b> assignments inside the template's constructor, and rename the
     * constructor to match the generated class name.
     *
     * @param constructorDeclaration the template's default constructor
     * @param generatedClassName new name for the constructor
     * @param targetField value assigned to the {@code targetField} property
     * @param regressionNormalizationMethod enum constant assigned to the
     *        {@code regressionNormalizationMethod} property
     * @param opType enum constant for the {@code opType} property; when {@code null}
     *        the template's original assignment is left untouched
     */
    private static void setConstructor(final ConstructorDeclaration constructorDeclaration,
                                       final SimpleName generatedClassName,
                                       final String targetField,
                                       final REGRESSION_NORMALIZATION_METHOD regressionNormalizationMethod,
                                       final OP_TYPE opType) {
        constructorDeclaration.setName(generatedClassName);
        final BlockStmt body = constructorDeclaration.getBody();
        final List<AssignExpr> assignExprs = body.findAll(AssignExpr.class);
        assignExprs.forEach(assignExpr -> {
            // Assumes every assignment target in the template constructor is a
            // plain NameExpr (asNameExpr() would throw otherwise) - a template
            // invariant, not enforced here.
            final String propertyName = assignExpr.getTarget().asNameExpr().getNameAsString();
            switch (propertyName) {
                case "targetField":
                    assignExpr.setValue(new StringLiteralExpr(targetField));
                    break;
                case "regressionNormalizationMethod":
                    // Emit "EnumSimpleName.CONSTANT" as a name expression.
                    assignExpr.setValue(new NameExpr(regressionNormalizationMethod.getClass().getSimpleName() + "." + regressionNormalizationMethod.name()));
                    break;
                case "opType":
                    if (opType != null) {
                        assignExpr.setValue(new NameExpr(opType.getClass().getSimpleName() + "." + opType.name()));
                    }
                    break;
                default:
                    logger.warn("Unexpected property inside the constructor: {}", propertyName);
            }
        });
    }

    /**
     * Add entries <b>category/KiePMMLRegressionTable</b> inside the constructor:
     * one {@code categoryTableMap.put("category", new GeneratedTable())} statement
     * per generated nested table.
     *
     * @param body constructor body to append to
     * @param regressionTablesMap generated tables keyed by class name
     */
    private static void addMapPopulation(final BlockStmt body, final Map<String, KiePMMLTableSourceCategory> regressionTablesMap) {
        regressionTablesMap.forEach((className, tableSourceCategory) -> {
            ObjectCreationExpr objectCreationExpr = new ObjectCreationExpr();
            objectCreationExpr.setType(className);
            NodeList<Expression> expressions = NodeList.nodeList(new StringLiteralExpr(tableSourceCategory.getCategory()), objectCreationExpr);
            body.addStatement(new MethodCallExpr(new NameExpr("categoryTableMap"), "put", expressions));
        });
    }

    /**
     * Add entries <b>output field/output value</b> inside the
     * <b>populateOutputFieldsMap</b> method of the template class.
     *
     * @param tableTemplate generated class declaration
     * @param outputFields output fields to materialize
     */
    private static void populateOutputFieldsMap(final ClassOrInterfaceDeclaration tableTemplate, final List<KiePMMLOutputField> outputFields) {
        final MethodDeclaration methodDeclaration = tableTemplate.getMethodsByName("populateOutputFieldsMap").get(0);
        final BlockStmt body = methodDeclaration.getBody()
                .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration.getName())));
        populateOutputFieldsMap(body, outputFields);
    }

    /**
     * Add entries <b>output field/output value</b> inside the given method body.
     * PREDICTED_VALUE fields map to {@code predictedEntry.getKey()}; PROBABILITY
     * fields map to a {@code probabilityMap.get(...)} lookup keyed by the field's
     * explicit value or, failing that, its target field. Any other result feature
     * is rejected.
     *
     * @param body method body to append {@code outputFieldsMap.put(...)} calls to
     * @param outputFields output fields to materialize
     * @throws KiePMMLInternalException for unmanaged result features
     */
    private static void populateOutputFieldsMap(final BlockStmt body, final List<KiePMMLOutputField> outputFields) {
        outputFields.forEach(outputField -> {
            StringLiteralExpr key = new StringLiteralExpr(outputField.getName());
            Expression value = null;
            switch (outputField.getResultFeature()) {
                case PREDICTED_VALUE:
                    value = new MethodCallExpr(new NameExpr("predictedEntry"), "getKey");
                    break;
                case PROBABILITY:
                    if (outputField.getValue() != null) {
                        NodeList<Expression> expressions = NodeList.nodeList(new StringLiteralExpr(outputField.getValue().toString()));
                        value = new MethodCallExpr(new NameExpr("probabilityMap"), "get", expressions);
                    } else if (outputField.getTargetField().isPresent()) {
                        NodeList<Expression> expressions = NodeList.nodeList(new StringLiteralExpr(outputField.getTargetField().get()));
                        value = new MethodCallExpr(new NameExpr("probabilityMap"), "get", expressions);
                    }
                    // A PROBABILITY field with neither value nor target field is
                    // silently skipped (value stays null).
                    break;
                default:
                    // All other possibilities not managed, yet
                    throw new KiePMMLInternalException(String.format("%s not managed, yet!", outputField.getResultFeature()));
            }
            if (value != null) {
                NodeList<Expression> expressions = NodeList.nodeList(key, value);
                body.addStatement(new MethodCallExpr(new NameExpr("outputFieldsMap"), "put", expressions));
            }
        });
    }

    /**
     * Add the <b>getProbabilityMap</b> <code>MethodDeclaration</code> to the class,
     * copying the body of the normalization-specific
     * {@code get<NormalizationMethod>ProbabilityMap} method from the
     * probability-map template.
     *
     * @param normalizationMethod selects which template method body is used
     * @param tableTemplate class declaration to receive the method
     * @throws KiePMMLInternalException wrapping any failure (template missing,
     *         method not found, ...)
     */
    private static void populateGetProbabilityMapMethod(final RegressionModel.NormalizationMethod normalizationMethod, final ClassOrInterfaceDeclaration tableTemplate) {
        try {
            String methodName = String.format("get%sProbabilityMap", normalizationMethod.name());
            templateEvaluate = getFromFileName(KIE_PMML_GET_PROBABILITY_MAP_METHOD_TEMPLATE_JAVA);
            cloneEvaluate = templateEvaluate.clone();
            ClassOrInterfaceDeclaration evaluateTemplateClass = cloneEvaluate.getClassByName(KIE_PMML_GET_PROBABILITY_MAP_METHOD_TEMPLATE)
                    .orElseThrow(() -> new RuntimeException(MAIN_CLASS_NOT_FOUND));
            final MethodDeclaration toReturn = evaluateTemplateClass.getMethodsByName(methodName).get(0);
            addMethod(toReturn, tableTemplate, "getProbabilityMap");
        } catch (Exception e) {
            // NOTE(review): only the message is propagated - the original stack
            // trace is lost. Confirm whether KiePMMLInternalException offers a
            // (String, Throwable) constructor and pass 'e' as the cause.
            throw new KiePMMLInternalException(e.getMessage());
        }
    }

    /**
     * Populate the <b>isBinary</b> <code>MethodDeclaration</code> of the class
     * with a constant {@code return true/false;} body: true only for a
     * CATEGORICAL op type with exactly two categories.
     *
     * @param opType PMML op type; may be {@code null} (yields false)
     * @param size number of nested regression tables (categories)
     * @param tableTemplate class declaration whose method is rewritten
     * @throws KiePMMLInternalException wrapping any failure
     */
    private static void populateIsBinaryMethod(final OpType opType, int size, final ClassOrInterfaceDeclaration tableTemplate) {
        try {
            final MethodDeclaration methodDeclaration = tableTemplate.getMethodsByName("isBinary").get(0);
            boolean toReturn = Objects.equals(OpType.CATEGORICAL, opType) && size == 2;
            BlockStmt blockStmt = new BlockStmt();
            blockStmt.addStatement(new ReturnStmt(new BooleanLiteralExpr(toReturn)));
            methodDeclaration.setBody(blockStmt);
        } catch (Exception e) {
            // NOTE(review): same cause-swallowing pattern as above - see
            // populateGetProbabilityMapMethod.
            throw new KiePMMLInternalException(e.getMessage());
        }
    }
}
/* * Copyright 2015-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.stream.app.tasklaunchrequest.transform.processor; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.test.IntegrationTest; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.boot.test.WebIntegrationTest; import org.springframework.cloud.stream.annotation.Bindings; import org.springframework.cloud.stream.messaging.Processor; import org.springframework.cloud.stream.test.binder.MessageCollector; import org.springframework.cloud.task.launcher.TaskLaunchRequest; import org.springframework.integration.transformer.MessageTransformationException; import org.springframework.messaging.support.GenericMessage; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.springframework.cloud.stream.test.matcher.MessageQueueMatcher.receivesPayloadThat; /** * Tests for TasklaunchrequestTransformIntegrationProcessor. 
 *
 * @author Glenn Renfro
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = TasklaunchrequestTransformProcessorIntegrationTests.TasklaunchrequestTransformProcessorApplication.class)
@WebIntegrationTest(randomPort = true)
@DirtiesContext
public abstract class TasklaunchrequestTransformProcessorIntegrationTests {

	// URI every test expects to find inside the emitted TaskLaunchRequest.
	public static final String DEFAULT_URI = "MY_URI";

	@Autowired
	@Bindings(TasklaunchrequestTransformProcessorConfiguration.class)
	protected Processor channels;

	@Autowired
	protected MessageCollector collector;

	/**
	 * Validates that the app loads with default properties: every inbound
	 * payload is transformed into the same default TaskLaunchRequest,
	 * regardless of payload content.
	 */
	@IntegrationTest({"uri=" + DEFAULT_URI})
	public static class UsingDefaultIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>("hello"));
			channels.input().send(new GenericMessage<Object>("hello world"));
			channels.input().send(new GenericMessage<Object>("hi!"));
			// One output message per input message, in order.
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest())));
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest())));
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest())));
		}
	}

	/**
	 * Validates that the app fails without URI: transformation of any message
	 * must raise a MessageTransformationException.
	 */
	public static class UsingNoURIIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test(expected = MessageTransformationException.class)
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>("hello"));
		}
	}

	/**
	 * Validates that the app handles empty payload the same as any other
	 * payload: the default request is still produced.
	 */
	@IntegrationTest({"uri=" + DEFAULT_URI})
	public static class UsingEmptyPayloadIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test()
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>(""));
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest())));
		}
	}

	/**
	 * Verify datasource properties are added to the TaskLaunchRequest as
	 * spring_datasource_* environment variables.
	 */
	@IntegrationTest({"dataSourceUrl=myUrl", "dataSourcePassword=myPassword",
			"dataSourceUserName=myUserName", "dataSourceDriverClassName=myClassName",
			"uri=" + DEFAULT_URI})
	public static class UsingDataSourceIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>("hello"));
			Map<String, String> environmentVariables = new HashMap<>(4);
			environmentVariables.put("spring_datasource_url", "myUrl");
			environmentVariables.put("spring_datasource_driverClassName", "myClassName");
			environmentVariables.put("spring_datasource_username", "myUserName");
			environmentVariables.put("spring_datasource_password", "myPassword");
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest(
					environmentVariables, null, null))));
		}
	}

	/**
	 * Verify deploymentProperties are added to the TaskLaunchRequest. The
	 * property string exercises the parser's splitting rules: "a=b" stays with
	 * its app key, bare "c=d" becomes its own entry, and the quoted
	 * "baz=bbb,nnn=mmm" value is kept intact (quotes included).
	 */
	@IntegrationTest({"deploymentProperties=app.wow.hello=world,app.wow.foo=bar,app.wow.test=a=b,c=d,e=\"baz=bbb,nnn=mmm\"",
			"uri=" + DEFAULT_URI})
	public static class UsingDeploymentPropertiesIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>("hello"));
			Map<String, String> environmentVariables = new HashMap<>(3);
			Map<String, String> deploymentProperties = new HashMap<>(2);
			deploymentProperties.put("app.wow.hello", "world");
			deploymentProperties.put("app.wow.foo", "bar");
			deploymentProperties.put("app.wow.test", "a=b");
			deploymentProperties.put("c", "d");
			deploymentProperties.put("e", "\"baz=bbb,nnn=mmm\"");
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest(
					environmentVariables, deploymentProperties, null))));
		}
	}

	/**
	 * Verify commandLineArguments are added to the TaskLaunchRequest, split on
	 * whitespace into individual arguments.
	 */
	@IntegrationTest({"commandLineArguments=--hello=world --foo=bar", "uri=" + DEFAULT_URI})
	public static class UsingCommandLineArgsIntegrationTests extends TasklaunchrequestTransformProcessorIntegrationTests {

		@Test
		public void test() throws InterruptedException {
			channels.input().send(new GenericMessage<Object>("hello"));
			Map<String, String> environmentVariables = new HashMap<>(1);
			List<String> commandLineArgs = new ArrayList<>(2);
			commandLineArgs.add("--hello=world");
			commandLineArgs.add("--foo=bar");
			assertThat(collector.forChannel(channels.output()), receivesPayloadThat(is(getDefaultRequest(
					environmentVariables, null, commandLineArgs))));
		}
	}

	/**
	 * Builds the TaskLaunchRequest expected when only the URI is configured:
	 * empty environment, no deployment properties, no command-line args.
	 */
	protected TaskLaunchRequest getDefaultRequest() {
		Map<String, String> environmentVariables = new HashMap<>(1);
		return getDefaultRequest(environmentVariables, null, null);
	}

	/**
	 * Builds the TaskLaunchRequest expected for the given environment,
	 * deployment properties and command-line arguments, always using
	 * {@link #DEFAULT_URI}.
	 */
	protected TaskLaunchRequest getDefaultRequest(
			Map<String, String> environmentVariables,
			Map<String, String> deploymentProperties,
			List<String> commandLineArgs) {
		TaskLaunchRequest request = new TaskLaunchRequest(
				DEFAULT_URI, commandLineArgs, environmentVariables, deploymentProperties);
		return request;
	}

	// Minimal boot application used to bring up the processor context for the
	// nested integration tests.
	@SpringBootApplication
	public static class TasklaunchrequestTransformProcessorApplication {
	}
}
/* BasicSplitPaneDivider.java -- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc. This file is part of GNU Classpath. GNU Classpath is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. GNU Classpath is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GNU Classpath; see the file COPYING. If not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. 
*/

package javax.swing.plaf.basic;

import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Insets;
import java.awt.LayoutManager;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import javax.swing.JButton;
import javax.swing.JSplitPane;
import javax.swing.SwingConstants;
import javax.swing.border.Border;

/**
 * The divider that separates the two parts of a JSplitPane in the Basic look
 * and feel.
 *
 * <p>
 * Implementation status: We do not have a real implementation yet. Currently,
 * it is mostly a stub to allow compiling other parts of the
 * javax.swing.plaf.basic package, although some parts are already
 * functional.
 * </p>
 *
 * @author Sascha Brawer (brawer_AT_dandelis.ch)
 */
public class BasicSplitPaneDivider extends Container
  implements PropertyChangeListener
{
  /**
   * Determined using the <code>serialver</code> tool of Apple/Sun JDK 1.3.1
   * on MacOS X 10.1.5.
   */
  static final long serialVersionUID = 1463404307042803342L;

  /**
   * The width and height of the little buttons for showing and hiding parts
   * of a JSplitPane in a single mouse click.
   */
  protected static final int ONE_TOUCH_SIZE = 6;

  /** The distance the one touch buttons will sit from the divider's edges. */
  protected static final int ONE_TOUCH_OFFSET = 2;

  /**
   * An object that performs the tasks associated with an ongoing drag
   * operation, or <code>null</code> if the user is currently not dragging
   * the divider.
   */
  protected DragController dragger;

  /**
   * The delegate object that is responsible for the UI of the
   * <code>JSplitPane</code> that contains this divider.
   */
  protected BasicSplitPaneUI splitPaneUI;

  /** The thickness of the divider in pixels. */
  protected int dividerSize;

  /** A divider that is used for layout purposes. */
  protected Component hiddenDivider;

  /** The JSplitPane containing this divider. */
  protected JSplitPane splitPane;

  /**
   * The listener for handling mouse events from both the divider and the
   * containing <code>JSplitPane</code>.
   *
   * <p>
   * The reason for also handling MouseEvents from the containing
   * <code>JSplitPane</code> is that users should be able to start a drag
   * gesture from inside the JSplitPane, but slightly outisde the divider.
   * </p>
   */
  protected MouseHandler mouseHandler = new MouseHandler();

  /**
   * The current orientation of the containing <code>JSplitPane</code>, which
   * is either {@link javax.swing.JSplitPane#HORIZONTAL_SPLIT} or {@link
   * javax.swing.JSplitPane#VERTICAL_SPLIT}.
   */
  protected int orientation;

  /**
   * The button for showing and hiding the left (or top) component of the
   * <code>JSplitPane</code>.
   */
  protected JButton leftButton;

  /**
   * The button for showing and hiding the right (or bottom) component of the
   * <code>JSplitPane</code>.
   */
  protected JButton rightButton;

  /**
   * The border of this divider. Typically, this will be an instance of {@link
   * javax.swing.plaf.basic.BasicBorders.SplitPaneDividerBorder}.
   *
   * @see #getBorder()
   * @see #setBorder(javax.swing.border.Border)
   */
  private Border border;

  // This is not a pixel count.
  // This int should be able to take 3 values.
  // left (top), middle, right(bottom)
  //  0            1         2
  /**
   * Keeps track of where the divider should be placed when using one touch
   * expand buttons.
   * This is package-private to avoid an accessor method.
   */
  transient int currentDividerLocation = 1;

  /**
   * Constructs a new divider.
   *
   * @param ui the UI delegate of the enclosing <code>JSplitPane</code>.
   */
  public BasicSplitPaneDivider(BasicSplitPaneUI ui)
  {
    setLayout(new DividerLayout());
    // setBasicSplitPaneUI also assigns the splitPane field used on the next line.
    setBasicSplitPaneUI(ui);
    setDividerSize(splitPane.getDividerSize());
  }

  /**
   * Sets the delegate object that is responsible for the UI of the {@link
   * javax.swing.JSplitPane} containing this divider.
   *
   * @param newUI the UI delegate, or <code>null</code> to release the
   *        connection to the current delegate.
   */
  public void setBasicSplitPaneUI(BasicSplitPaneUI newUI)
  {
    /* Remove the connection to the existing JSplitPane. */
    if (splitPane != null)
      {
        splitPane.removePropertyChangeListener(this);
        removeMouseListener(mouseHandler);
        removeMouseMotionListener(mouseHandler);
        splitPane = null;
        hiddenDivider = null;
      }

    /* Establish the connection to the new JSplitPane. */
    splitPaneUI = newUI;
    if (splitPaneUI != null)
      splitPane = newUI.getSplitPane();
    if (splitPane != null)
      {
        splitPane.addPropertyChangeListener(this);
        addMouseListener(mouseHandler);
        addMouseMotionListener(mouseHandler);
        hiddenDivider = splitPaneUI.getNonContinuousLayoutDivider();
        orientation = splitPane.getOrientation();
        // Sync one-touch buttons with the new pane's current setting.
        oneTouchExpandableChanged();
      }
  }

  /**
   * Returns the delegate object that is responsible for the UI of the {@link
   * javax.swing.JSplitPane} containing this divider.
   *
   * @return The UI for the JSplitPane.
   */
  public BasicSplitPaneUI getBasicSplitPaneUI()
  {
    return splitPaneUI;
  }

  /**
   * Sets the thickness of the divider.
   *
   * @param newSize the new width or height in pixels.
   */
  public void setDividerSize(int newSize)
  {
    this.dividerSize = newSize;
  }

  /**
   * Retrieves the thickness of the divider.
   *
   * @return The thickness of the divider.
   */
  public int getDividerSize()
  {
    return dividerSize;
  }

  /**
   * Sets the border of this divider.
   *
   * @param border the new border. Typically, this will be an instance of
   *        {@link javax.swing.plaf.basic.BasicBorders.SplitPaneBorder}.
   *
   * @since 1.3
   */
  public void setBorder(Border border)
  {
    if (border != this.border)
      {
        Border oldValue = this.border;
        this.border = border;
        firePropertyChange("border", oldValue, border);
      }
  }

  /**
   * Retrieves the border of this divider.
   *
   * @return the current border, or <code>null</code> if no border has been
   *         set.
   *
   * @since 1.3
   */
  public Border getBorder()
  {
    return border;
  }

  /**
   * Retrieves the insets of the divider. If a border has been installed on
   * the divider, the result of calling its <code>getBorderInsets</code>
   * method is returned. Otherwise, the inherited implementation will be
   * invoked.
   *
   * @see javax.swing.border.Border#getBorderInsets(java.awt.Component)
   */
  public Insets getInsets()
  {
    if (border != null)
      return border.getBorderInsets(this);
    else
      return super.getInsets();
  }

  /**
   * Returns the preferred size of this divider, as computed by the installed
   * layout manager (<code>dividerSize</code> by <code>dividerSize</code>
   * pixels with the default DividerLayout).
   *
   * @return The preferred size of the divider.
   */
  public Dimension getPreferredSize()
  {
    return getLayout().preferredLayoutSize(this);
  }

  /**
   * Returns the minimal size of this divider, which is
   * <code>dividerSize</code> by <code>dividerSize</code> pixels.
   *
   * @return The minimal size of the divider.
   */
  public Dimension getMinimumSize()
  {
    return getPreferredSize();
  }

  /**
   * Processes events from the <code>JSplitPane</code> that contains this
   * divider: reacts to changes of one-touch-expandable, orientation and
   * divider-size properties.
   *
   * @param e The PropertyChangeEvent.
   */
  public void propertyChange(PropertyChangeEvent e)
  {
    if (e.getPropertyName().equals(JSplitPane.ONE_TOUCH_EXPANDABLE_PROPERTY))
      oneTouchExpandableChanged();
    else if (e.getPropertyName().equals(JSplitPane.ORIENTATION_PROPERTY))
      {
        orientation = splitPane.getOrientation();
        if (splitPane.isOneTouchExpandable())
          {
            layout();
            repaint();
          }
      }
    else if (e.getPropertyName().equals(JSplitPane.DIVIDER_SIZE_PROPERTY))
      dividerSize = splitPane.getDividerSize();
  }

  /**
   * Paints the divider by painting its border, then the one-touch buttons
   * when enabled.
   *
   * @param g The Graphics Object to paint with.
   */
  public void paint(Graphics g)
  {
    // NOTE(review): this local deliberately shadows the int field
    // 'dividerSize' - here it holds the divider's current Dimension.
    Dimension dividerSize;

    super.paint(g);
    if (border != null)
      {
        dividerSize = getSize();
        border.paintBorder(this, g, 0, 0, dividerSize.width,
                           dividerSize.height);
      }
    if (splitPane.isOneTouchExpandable())
      {
        // Assumes the buttons were created by create*OneTouchButton and are
        // BasicArrowButtons; a subclass overriding those factories with a
        // different JButton type would break these casts.
        ((BasicArrowButton) rightButton).paint(g);
        ((BasicArrowButton) leftButton).paint(g);
      }
  }

  /**
   * Reacts to changes of the <code>oneToughExpandable</code> property of the
   * containing <code>JSplitPane</code>: creates and wires the two one-touch
   * buttons when enabled, removes them when disabled.
   */
  protected void oneTouchExpandableChanged()
  {
    if (splitPane.isOneTouchExpandable())
      {
        leftButton = createLeftOneTouchButton();
        rightButton = createRightOneTouchButton();
        add(leftButton);
        add(rightButton);

        leftButton.addMouseListener(mouseHandler);
        rightButton.addMouseListener(mouseHandler);

        // Set it to 1 (the middle position).
        currentDividerLocation = 1;
      }
    else
      {
        if (leftButton != null && rightButton != null)
          {
            leftButton.removeMouseListener(mouseHandler);
            rightButton.removeMouseListener(mouseHandler);
            remove(leftButton);
            remove(rightButton);
            leftButton = null;
            rightButton = null;
          }
      }
    layout();
    repaint();
  }

  /**
   * Creates a button for showing and hiding the left (or top) part of a
   * <code>JSplitPane</code>. The arrow direction depends on the current
   * orientation.
   *
   * @return The left one touch button.
   */
  protected JButton createLeftOneTouchButton()
  {
    int dir = SwingConstants.WEST;
    if (orientation == JSplitPane.VERTICAL_SPLIT)
      dir = SwingConstants.NORTH;
    JButton button = new BasicArrowButton(dir);
    button.setBorder(null);
    return button;
  }

  /**
   * Creates a button for showing and hiding the right (or bottom) part of a
   * <code>JSplitPane</code>. The arrow direction depends on the current
   * orientation.
   *
   * @return The right one touch button.
   */
  protected JButton createRightOneTouchButton()
  {
    int dir = SwingConstants.EAST;
    if (orientation == JSplitPane.VERTICAL_SPLIT)
      dir = SwingConstants.SOUTH;
    JButton button = new BasicArrowButton(dir);
    button.setBorder(null);
    return button;
  }

  /**
   * Prepares the divider for dragging by calling the
   * <code>startDragging</code> method of the UI delegate of the enclosing
   * <code>JSplitPane</code>.
   *
   * @see BasicSplitPaneUI#startDragging()
   */
  protected void prepareForDragging()
  {
    if (splitPaneUI != null)
      splitPaneUI.startDragging();
  }

  /**
   * Drags the divider to a given location by calling the
   * <code>dragDividerTo</code> method of the UI delegate of the enclosing
   * <code>JSplitPane</code>.
   *
   * @param location the new location of the divider.
   *
   * @see BasicSplitPaneUI#dragDividerTo(int location)
   */
  protected void dragDividerTo(int location)
  {
    if (splitPaneUI != null)
      splitPaneUI.dragDividerTo(location);
  }

  /**
   * Finishes a dragging gesture by calling the <code>finishDraggingTo</code>
   * method of the UI delegate of the enclosing <code>JSplitPane</code>.
   *
   * @param location the new, final location of the divider.
   *
   * @see BasicSplitPaneUI#finishDraggingTo(int location)
   */
  protected void finishDraggingTo(int location)
  {
    if (splitPaneUI != null)
      splitPaneUI.finishDraggingTo(location);
  }

  /**
   * This helper method moves the divider to one of the three locations when
   * using one touch expand buttons. Location 0 is the left (or top) most
   * location. Location 1 is the middle. Location 2 is the right (or bottom)
   * most location.
   * This is package-private to avoid an accessor method.
   *
   * @param locationIndex The location to move to.
   */
  void moveDividerTo(int locationIndex)
  {
    Insets insets = splitPane.getInsets();
    switch (locationIndex)
      {
      case 1:
        // Middle: restore the last explicitly-set divider location.
        splitPane.setDividerLocation(splitPane.getLastDividerLocation());
        break;
      case 0:
        int top = (orientation == JSplitPane.HORIZONTAL_SPLIT) ? insets.left
                                                               : insets.top;
        splitPane.setDividerLocation(top);
        break;
      case 2:
        int bottom;
        if (orientation == JSplitPane.HORIZONTAL_SPLIT)
          bottom = splitPane.getBounds().width - insets.right - dividerSize;
        else
          bottom = splitPane.getBounds().height - insets.bottom - dividerSize;
        splitPane.setDividerLocation(bottom);
        break;
      }
  }

  /**
   * The listener for handling mouse events from both the divider and the
   * containing <code>JSplitPane</code>.
   *
   * <p>
   * The reason for also handling MouseEvents from the containing
   * <code>JSplitPane</code> is that users should be able to start a drag
   * gesture from inside the JSplitPane, but slightly outisde the divider.
   * </p>
   *
   * @author Sascha Brawer (brawer_AT_dandelis.ch)
   */
  protected class MouseHandler extends MouseAdapter
    implements MouseMotionListener
  {
    /** Keeps track of whether a drag is occurring. */
    private transient boolean isDragging;

    /**
     * This method is called when the mouse is pressed. A press on one of the
     * one-touch buttons steps the divider between the three fixed positions;
     * a press anywhere else starts a drag gesture.
     *
     * @param e The MouseEvent.
     */
    public void mousePressed(MouseEvent e)
    {
      if (splitPane.isOneTouchExpandable())
        {
          if (e.getSource() == leftButton)
            {
              currentDividerLocation--;
              if (currentDividerLocation < 0)
                currentDividerLocation = 0;
              moveDividerTo(currentDividerLocation);
              return;
            }
          else if (e.getSource() == rightButton)
            {
              currentDividerLocation++;
              if (currentDividerLocation > 2)
                currentDividerLocation = 2;
              moveDividerTo(currentDividerLocation);
              return;
            }
        }
      isDragging = true;
      currentDividerLocation = 1;
      // Horizontal splits track the x coordinate, vertical ones the y
      // coordinate, via the two DragController variants.
      if (orientation == JSplitPane.HORIZONTAL_SPLIT)
        dragger = new DragController(e);
      else
        dragger = new VerticalDragController(e);
      prepareForDragging();
    }

    /**
     * This method is called when the mouse is released: completes a pending
     * drag, if any.
     *
     * @param e The MouseEvent.
     */
    public void mouseReleased(MouseEvent e)
    {
      if (isDragging)
        dragger.completeDrag(e);
      isDragging = false;
    }

    /**
     * Repeatedly invoked when the user is dragging the mouse cursor while
     * having pressed a mouse button.
     *
     * @param e The MouseEvent.
     */
    public void mouseDragged(MouseEvent e)
    {
      if (dragger != null)
        dragger.continueDrag(e);
    }

    /**
     * Repeatedly invoked when the user is dragging the mouse cursor without
     * having pressed a mouse button.
     *
     * @param e The MouseEvent.
     */
    public void mouseMoved(MouseEvent e)
    {
      // Do nothing.
    }
  }

  /**
   * Performs the tasks associated with an ongoing drag operation. This base
   * variant tracks the x coordinate (horizontal split).
   *
   * @author Sascha Brawer (brawer_AT_dandelis.ch)
   */
  protected class DragController
  {
    /**
     * The difference between where the mouse is clicked and the initial
     * divider location.
     */
    transient int offset;

    /**
     * Creates a new DragController object.
     *
     * @param e The MouseEvent to initialize with.
     */
    protected DragController(MouseEvent e)
    {
      offset = e.getX();
    }

    /**
     * This method returns true if the divider can move.
     *
     * @return True if dragging is allowed.
     */
    protected boolean isValid()
    {
      // Views can always be resized?
      return true;
    }

    /**
     * Returns a position for the divider given the MouseEvent.
     *
     * @param e MouseEvent.
     *
     * @return The position for the divider to move to.
     */
    protected int positionForMouseEvent(MouseEvent e)
    {
      return e.getX() + getX() - offset;
    }

    /**
     * This method returns one of the two paramters for the orientation. In
     * this case, it returns x.
     *
     * @param x The x coordinate.
     * @param y The y coordinate.
     *
     * @return The x coordinate.
     */
    protected int getNeededLocation(int x, int y)
    {
      return x;
    }

    /**
     * This method is called to pass on the drag information to the UI through
     * dragDividerTo.
     *
     * @param newX The x coordinate of the MouseEvent.
     * @param newY The y coordinate of the MouseEvent.
     */
    protected void continueDrag(int newX, int newY)
    {
      if (isValid())
        dragDividerTo(adjust(newX, newY));
    }

    /**
     * This method is called to pass on the drag information to the UI
     * through dragDividerTo.
     *
     * @param e The MouseEvent.
     */
    protected void continueDrag(MouseEvent e)
    {
      if (isValid())
        dragDividerTo(positionForMouseEvent(e));
    }

    /**
     * This method is called to finish the drag session by calling
     * finishDraggingTo.
     *
     * @param x The x coordinate of the MouseEvent.
     * @param y The y coordinate of the MouseEvent.
     */
    protected void completeDrag(int x, int y)
    {
      finishDraggingTo(adjust(x, y));
    }

    /**
     * This method is called to finish the drag session by calling
     * finishDraggingTo.
     *
     * @param e The MouseEvent.
     */
    protected void completeDrag(MouseEvent e)
    {
      finishDraggingTo(positionForMouseEvent(e));
    }

    /**
     * This is a helper method that includes the offset in the needed
     * location.
     *
     * @param x The x coordinate of the MouseEvent.
     * @param y The y coordinate of the MouseEvent.
     *
     * @return The needed location adjusted by the offsets.
     */
    int adjust(int x, int y)
    {
      return getNeededLocation(x, y) + getX() - offset;
    }
  }

  /**
   * This is a helper class that controls dragging when the orientation is
   * VERTICAL_SPLIT: it overrides the base controller to track the y
   * coordinate instead of x.
   */
  protected class VerticalDragController extends DragController
  {
    /**
     * Creates a new VerticalDragController object.
     *
     * @param e The MouseEvent to initialize with.
     */
    protected VerticalDragController(MouseEvent e)
    {
      super(e);
      // Overwrite the x offset recorded by the superclass constructor.
      offset = e.getY();
    }

    /**
     * This method returns one of the two parameters given the orientation. In
     * this case, it returns y.
     *
     * @param x The x coordinate of the MouseEvent.
     * @param y The y coordinate of the MouseEvent.
     *
     * @return The y coordinate.
     */
    protected int getNeededLocation(int x, int y)
    {
      return y;
    }

    /**
     * This method returns the new location of the divider given a MouseEvent.
     *
     * @param e The MouseEvent.
     *
     * @return The new location of the divider.
     */
    protected int positionForMouseEvent(MouseEvent e)
    {
      return e.getY() + getY() - offset;
    }

    /**
     * This is a helper method that includes the offset in the needed
     * location.
     *
     * @param x The x coordinate of the MouseEvent.
     * @param y The y coordinate of the MouseEvent.
     *
     * @return The needed location adjusted by the offsets.
     */
    int adjust(int x, int y)
    {
      return getNeededLocation(x, y) + getY() - offset;
    }
  }

  /**
   * This helper class acts as the Layout Manager for the divider: it sizes
   * the divider itself and positions the one-touch buttons when enabled.
   */
  protected class DividerLayout implements LayoutManager
  {
    /**
     * Creates a new DividerLayout object.
     */
    protected DividerLayout()
    {
      // Nothing to do here.
    }

    /**
     * This method is called when a Component is added.
     *
     * @param string The constraints string.
     * @param c The Component to add.
     */
    public void addLayoutComponent(String string, Component c)
    {
      // Do nothing.
    }

    /**
     * This method is called to lay out the container.
     *
     * @param c The container to lay out.
     */
    public void layoutContainer(Container c)
    {
      if (splitPane.isOneTouchExpandable())
        {
          changeButtonOrientation();
          positionButtons();
        }
    }

    /**
     * This method returns the minimum layout size.
     *
     * @param c The container to calculate for.
     *
     * @return The minimum layout size.
     */
    public Dimension minimumLayoutSize(Container c)
    {
      return preferredLayoutSize(c);
    }

    /**
     * This method returns the preferred layout size: a square of
     * dividerSize pixels.
     *
     * @param c The container to calculate for.
     *
     * @return The preferred layout size.
     */
    public Dimension preferredLayoutSize(Container c)
    {
      return new Dimension(dividerSize, dividerSize);
    }

    /**
     * This method is called when a component is removed.
     *
     * @param c The component to remove.
     */
    public void removeLayoutComponent(Component c)
    {
      // Do nothing.
    }

    /**
     * This method changes the button orientation when the orientation of the
     * SplitPane changes.
     */
    private void changeButtonOrientation()
    {
      if (orientation == JSplitPane.HORIZONTAL_SPLIT)
        {
          ((BasicArrowButton) rightButton).setDirection(SwingConstants.EAST);
          ((BasicArrowButton) leftButton).setDirection(SwingConstants.WEST);
        }
      else
        {
          ((BasicArrowButton) rightButton).setDirection(SwingConstants.SOUTH);
          ((BasicArrowButton) leftButton).setDirection(SwingConstants.NORTH);
        }
    }

    /**
     * This method sizes and positions the buttons: stacked along the divider
     * axis, inset by ONE_TOUCH_OFFSET, both sharing the same dimensions.
     */
    private void positionButtons()
    {
      int w = 0;
      int h = 0;
      if (orientation == JSplitPane.HORIZONTAL_SPLIT)
        {
          rightButton.setLocation(ONE_TOUCH_OFFSET, ONE_TOUCH_OFFSET);
          leftButton.setLocation(ONE_TOUCH_OFFSET,
                                 ONE_TOUCH_OFFSET + 2 * ONE_TOUCH_SIZE);
          w = dividerSize - 2 * ONE_TOUCH_OFFSET;
          h = 2 * ONE_TOUCH_SIZE;
        }
      else
        {
          leftButton.setLocation(ONE_TOUCH_OFFSET, ONE_TOUCH_OFFSET);
          rightButton.setLocation(ONE_TOUCH_OFFSET + 2 * ONE_TOUCH_SIZE,
                                  ONE_TOUCH_OFFSET);
          h = dividerSize - 2 * ONE_TOUCH_OFFSET;
          w = 2 * ONE_TOUCH_SIZE;
        }
      Dimension dims = new Dimension(w, h);
      leftButton.setSize(dims);
      rightButton.setSize(dims);
    }
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.template.impl;

import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.dnd.*;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.CompoundScheme;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.ui.*;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.*;
import com.intellij.ui.speedSearch.SpeedSearchSupply;
import com.intellij.util.Alarm;
import com.intellij.util.NullableFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.util.List;
import java.util.*;

/**
 * Settings UI for live templates: a checkbox tree of template groups and templates
 * on top, and a {@link LiveTemplateSettingsEditor} for the selected template below.
 * Edits are made against copies of the groups (via {@link CompoundScheme.MutatorHelper})
 * and written back to {@link TemplateSettings} in {@link #apply()}.
 */
public class TemplateListPanel extends JPanel implements Disposable {
  // Card-layout keys for the details panel: "nothing selected" label vs. template editor.
  private static final String NO_SELECTION = "NoSelection";
  private static final String TEMPLATE_SETTINGS = "TemplateSettings";
  // Placeholder template used to pre-create the editor before any real selection exists.
  private static final TemplateImpl MOCK_TEMPLATE = new TemplateImpl("mockTemplate-xxx", "mockTemplateGroup-yyy");
  // Abbreviation assigned to freshly added/copied templates until the user renames it.
  public static final String ABBREVIATION = "<abbreviation>";
  // Orders templates by key, then group name; case-insensitive with a case-sensitive tiebreak.
  public static final Comparator<TemplateImpl> TEMPLATE_COMPARATOR = new Comparator<TemplateImpl>() {
    @Override
    public int compare(TemplateImpl o1, TemplateImpl o2) {
      int compareKey = compareCaseInsensitively(o1.getKey(), o2.getKey());
      return compareKey != 0 ? compareKey : compareCaseInsensitively(o1.getGroupName(), o2.getGroupName());
    }

    private int compareCaseInsensitively(String s1, String s2) {
      int result = s1.compareToIgnoreCase(s2);
      return result != 0 ? result : s1.compareTo(s2);
    }
  };

  static {
    MOCK_TEMPLATE.setString("");
  }

  private CheckboxTree myTree;
  // Working copies of the template groups currently shown in the tree.
  private final List<TemplateGroup> myTemplateGroups = new ArrayList<>();
  private final TemplateExpandShortcutPanel myExpandByDefaultPanel =
    new TemplateExpandShortcutPanel(CodeInsightBundle.message("templates.dialog.shortcut.chooser.label"));

  private CheckedTreeNode myTreeRoot = new CheckedTreeNode(null);

  // Debounces details-panel refreshes on selection changes (see createTable()).
  private final Alarm myAlarm = new Alarm();
  private boolean myUpdateNeeded = false;
  private static final Logger LOG = Logger.getInstance(TemplateListPanel.class);

  // Per-template pending edits, keyed by identity because copies may compare equal.
  private final Map<TemplateImpl, Map<TemplateOptionalProcessor, Boolean>> myTemplateOptions = ContainerUtil.newIdentityHashMap();
  private final Map<TemplateImpl, TemplateContext> myTemplateContext = ContainerUtil.newIdentityHashMap();
  private final JPanel myDetailsPanel = new JPanel(new CardLayout());
  private LiveTemplateSettingsEditor myCurrentTemplateEditor;
  private final JLabel myEmptyCardLabel = new JLabel();

  private final CompoundScheme.MutatorHelper<TemplateGroup, TemplateImpl> mutatorHelper = new CompoundScheme.MutatorHelper<>();

  public TemplateListPanel() {
    super(new BorderLayout());
    myDetailsPanel.setBorder(BorderFactory.createEmptyBorder(10, 0, 0, 0));
    myEmptyCardLabel.setHorizontalAlignment(SwingConstants.CENTER);
    myDetailsPanel.add(myEmptyCardLabel, NO_SELECTION);
    // Pre-create the editor with the mock template so myCurrentTemplateEditor is never null.
    createTemplateEditor(MOCK_TEMPLATE, "Tab", MOCK_TEMPLATE.createOptions(), MOCK_TEMPLATE.createContext());

    add(myExpandByDefaultPanel, BorderLayout.NORTH);

    Splitter splitter = new Splitter(true, 0.9f);
    splitter.setFirstComponent(createTable());
    splitter.setSecondComponent(myDetailsPanel);
    add(splitter, BorderLayout.CENTER);
  }

  @Override
  public void dispose() {
    myCurrentTemplateEditor.dispose();
    myAlarm.cancelAllRequests();
  }

  /** Discards pending edits and reloads groups/templates from {@link TemplateSettings}. */
  public void reset() {
    myTemplateOptions.clear();
    myTemplateContext.clear();

    TemplateSettings templateSettings = TemplateSettings.getInstance();
    List<TemplateGroup> groups = getSortedGroups(templateSettings);

    initTemplates(groups, templateSettings.getLastSelectedTemplateGroup(), templateSettings.getLastSelectedTemplateKey());

    myExpandByDefaultPanel.setSelectedChar(templateSettings.getDefaultShortcutChar());
    // Defer the first details refresh until the panel is actually showing.
    UiNotifyConnector.doWhenFirstShown(this, () -> updateTemplateDetails(false, false));
    myUpdateNeeded = true;
  }

  @NotNull
  private static List<TemplateGroup> getSortedGroups(TemplateSettings templateSettings) {
    List<TemplateGroup> groups = new ArrayList<>(templateSettings.getTemplateGroups());
    Collections.sort(groups, (o1, o2) -> o1.getName().compareToIgnoreCase(o2.getName()));
    return groups;
  }

  /**
   * Validates the edited templates (non-empty abbreviation and text, no duplicate keys
   * per group) and writes them back into {@link TemplateSettings}.
   *
   * @throws ConfigurationException if validation fails; nothing is saved in that case
   */
  public void apply() throws ConfigurationException {
    List<TemplateGroup> templateGroups = getTemplateGroups();
    for (TemplateGroup templateGroup : templateGroups) {
      Set<String> names = new HashSet<>();
      List<TemplateImpl> templates = templateGroup.getElements();
      for (TemplateImpl template : templates) {
        String key = template.getKey();
        if (StringUtil.isEmptyOrSpaces(key)) {
          throw new ConfigurationException("A live template with an empty abbreviation has been found in '" + templateGroup.getName() + "' group, such live templates are not allowed");
        }
        if (StringUtil.isEmptyOrSpaces(template.getString())) {
          throw new ConfigurationException("A live template '" + key + "' with an empty text has been found in '" + templateGroup.getName() + "' group, such live templates make no sense");
        }
        if (!names.add(key)) {
          throw new ConfigurationException("Duplicate `" + key + "` live templates in " + templateGroup.getName() + " group");
        }
      }
    }

    // Fold the per-template option/context edits into the template objects themselves.
    for (TemplateGroup templateGroup : templateGroups) {
      for (TemplateImpl template : templateGroup.getElements()) {
        template.applyOptions(getTemplateOptions(template));
        template.applyContext(getTemplateContext(template));
      }
    }
    TemplateSettings templateSettings = TemplateSettings.getInstance();
    templateSettings.setTemplates(mutatorHelper.apply(templateGroups, (original, copied) -> {
      if (original.isModified()) {
        return;
      }

      List<TemplateImpl> originalElements = original.getElements();
      List<TemplateImpl> copiedElements = copied.getElements();
      if (!originalElements.equals(copiedElements)) {
        original.setModified(true);
      }
      else {
        // TemplateImpl.equals doesn't compare context and I (develar) don't want to risk and change this behavior, so, we compare it explicitly
        for (int i = 0; i < originalElements.size(); i++) {
          if (originalElements.get(i).getTemplateContext().getDifference(copiedElements.get(i).getTemplateContext()) != null) {
            original.setModified(true);
            break;
          }
        }
      }
    }));
    templateSettings.setDefaultShortcutChar(myExpandByDefaultPanel.getSelectedChar());
  }

  private final boolean isTest = ApplicationManager.getApplication().isUnitTestMode();

  /** Returns true if the UI state differs from the persisted settings; logs the reason in unit-test mode. */
  public boolean isModified() {
    TemplateSettings templateSettings = TemplateSettings.getInstance();
    if (templateSettings.getDefaultShortcutChar() != myExpandByDefaultPanel.getSelectedChar()) {
      if (isTest) {
        //noinspection UseOfSystemOutOrSystemErr
        System.err.println("LiveTemplatesConfig: templateSettings.getDefaultShortcutChar()="+templateSettings.getDefaultShortcutChar() + "; myExpandByDefaultComponent.getSelectedChar()="+ myExpandByDefaultPanel.getSelectedChar());
      }
      return true;
    }

    List<TemplateGroup> originalGroups = getSortedGroups(templateSettings);
    List<TemplateGroup> newGroups = getTemplateGroups();
    if (!ContainerUtil.map2Set(originalGroups, TemplateGroup::getName).equals(ContainerUtil.map2Set(newGroups, TemplateGroup::getName))) {
      return true;
    }

    List<TemplateImpl> originalGroup = collectTemplates(originalGroups);
    List<TemplateImpl> newGroup = collectTemplates(newGroups);

    String msg = checkAreEqual(originalGroup, newGroup);
    if (msg == null) return false;

    if (isTest) {
      //noinspection UseOfSystemOutOrSystemErr
      System.err.println("LiveTemplatesConfig: " + msg);
    }
    return true;
  }

  /** Selects the given template in the tree and opens its editor. */
  public void editTemplate(TemplateImpl template) {
    selectTemplate(template.getGroupName(), template.getKey());
    updateTemplateDetails(true, false);
  }

  @Nullable
  public JComponent getPreferredFocusedComponent() {
    if (getTemplate(getSingleSelectedIndex()) != null) {
      return myCurrentTemplateEditor.getKeyField();
    }
    return null;
  }

  // Flattens all groups' templates into one list, sorted by group name then key.
  private static List<TemplateImpl> collectTemplates(@NotNull List<? extends TemplateGroup> groups) {
    List<TemplateImpl> result = new ArrayList<>();
    for (TemplateGroup group : groups) {
      result.addAll(group.getElements());
    }
    Collections.sort(result, (o1, o2) -> {
      final int groupsEqual = o1.getGroupName().compareToIgnoreCase(o2.getGroupName());
      if (groupsEqual != 0) {
        return groupsEqual;
      }
      return o1.getKey().compareToIgnoreCase(o2.getKey());
    });
    return result;
  }

  /**
   * Compares two sorted template lists pairwise.
   *
   * @return null if equal, otherwise a short description of the first difference
   *         (verbose in unit-test mode)
   */
  private String checkAreEqual(@NotNull List<? extends TemplateImpl> originalGroup, @NotNull List<? extends TemplateImpl> newGroup) {
    if (originalGroup.size() != newGroup.size()) return "different sizes";

    for (int i = 0; i < newGroup.size(); i++) {
      TemplateImpl t1 = newGroup.get(i);
      TemplateImpl t2 = originalGroup.get(i);
      if (templatesDiffer(t1, t2)) {
        if (isTest) {
          return "Templates differ: new=" + t1 + "; original=" + t2 + "; equals=" + t1.equals(t2) + "; vars=" + t1.getVariables().equals(t2.getVariables()) + "; options=" + areOptionsEqual(t1, t2) + "; diff=" + getTemplateContext(t1).getDifference(t2.getTemplateContext()) + "\ncontext1=" + getTemplateContext(t1) + "\ncontext2=" + getTemplateContext(t2);
        }
        return "templates differ";
      }
    }
    return null;
  }

  // Compares the (edited) options of newTemplate against the processors' state on originalTemplate.
  private boolean areOptionsEqual(@NotNull TemplateImpl newTemplate, @NotNull TemplateImpl originalTemplate) {
    Map<TemplateOptionalProcessor, Boolean> templateOptions = getTemplateOptions(newTemplate);
    for (TemplateOptionalProcessor processor : templateOptions.keySet()) {
      if (processor.isEnabled(originalTemplate) != templateOptions.get(processor).booleanValue()) return false;
    }
    return true;
  }

  private TemplateContext getTemplateContext(final TemplateImpl newTemplate) {
    return myTemplateContext.get(newTemplate);
  }

  private Map<TemplateOptionalProcessor, Boolean> getTemplateOptions(@NotNull TemplateImpl newTemplate) {
    return myTemplateOptions.get(newTemplate);
  }

  private List<TemplateGroup> getTemplateGroups() {
    return myTemplateGroups;
  }

  // Replaces the current editor in the details panel with one for the given template.
  private void createTemplateEditor(final TemplateImpl template,
                                    String shortcut,
                                    Map<TemplateOptionalProcessor, Boolean> options,
                                    TemplateContext context) {
    myCurrentTemplateEditor = new LiveTemplateSettingsEditor(template, shortcut, options, context, () -> {
      DefaultMutableTreeNode node = getNode(getSingleSelectedIndex());
      if (node != null) {
        ((DefaultTreeModel)myTree.getModel()).nodeChanged(node);
        TemplateSettings.getInstance().setLastSelectedTemplate(template.getGroupName(), template.getKey());
      }
    });
    // Remove any previous editor card; the NO_SELECTION label card is kept.
    for (Component component : myDetailsPanel.getComponents()) {
      if (component instanceof LiveTemplateSettingsEditor) {
        myDetailsPanel.remove(component);
      }
    }
    myDetailsPanel.add(myCurrentTemplateEditor, TEMPLATE_SETTINGS);
  }

  /** Returns the template at the given visible tree row, or null if the row holds a group or nothing. */
  @Nullable
  private TemplateImpl getTemplate(int row) {
    JTree tree = myTree;
    TreePath path = tree.getPathForRow(row);
    if (path != null) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      if (node.getUserObject() instanceof TemplateImpl) {
        return (TemplateImpl)node.getUserObject();
      }
    }
    return null;
  }

  /** Returns the group at the given visible tree row, or null if the row holds a template or nothing. */
  @Nullable
  private TemplateGroup getGroup(int row) {
    TreePath path = myTree.getPathForRow(row);
    if (path != null) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      if (node.getUserObject() instanceof TemplateGroup) {
        return (TemplateGroup)node.getUserObject();
      }
    }
    return null;
  }

  // Moves the given templates (with their tree nodes) into the named group and reselects them.
  private void moveTemplates(Map<TemplateImpl, DefaultMutableTreeNode> map, @NotNull String newGroupName) {
    List<TreePath> toSelect = new ArrayList<>();
    for (TemplateImpl template : map.keySet()) {
      DefaultMutableTreeNode oldTemplateNode = map.get(template);

      TemplateGroup oldGroup = getTemplateGroup(template.getGroupName());
      if (oldGroup != null) {
        oldGroup.removeElement(template);
      }

      template.setGroupName(newGroupName);

      removeNodeFromParent(oldTemplateNode);

      // registerTemplate creates the target group/node if needed.
      toSelect.add(new TreePath(registerTemplate(template).getPath()));
    }

    myTree.getSelectionModel().clearSelection();
    for (TreePath path : toSelect) {
      myTree.expandPath(path.getParentPath());
      myTree.addSelectionPath(path);
      myTree.scrollRowToVisible(myTree.getRowForPath(path));
    }
  }

  @Nullable
  private DefaultMutableTreeNode getNode(final int row) {
    JTree tree = myTree;
    TreePath path = tree.getPathForRow(row);
    if (path != null) {
      return (DefaultMutableTreeNode)path.getLastPathComponent();
    }

    return null;
  }

  @Nullable
  private TemplateGroup getTemplateGroup(final String groupName) {
    for (TemplateGroup group : myTemplateGroups) {
      if (group.getName().equals(groupName)) return group;
    }

    return null;
  }

  // Adds a new template to the group of the current selection (or the user group).
  private void addTemplate() {
    String defaultGroup = TemplateSettings.USER_GROUP_NAME;
    final DefaultMutableTreeNode node = getNode(getSingleSelectedIndex());
    if (node != null) {
      if (node.getUserObject() instanceof TemplateImpl) {
        defaultGroup = ((TemplateImpl) node.getUserObject()).getGroupName();
      }
      else if (node.getUserObject() instanceof TemplateGroup) {
        defaultGroup = ((TemplateGroup) node.getUserObject()).getName();
      }
    }

    addTemplate(new TemplateImpl(ABBREVIATION, "", defaultGroup));
  }

  /** Registers the template in the tree/model and opens its editor. */
  public void addTemplate(TemplateImpl template) {
    myTemplateOptions.put(template, template.createOptions());
    myTemplateContext.put(template, template.createContext());

    registerTemplate(template);
    updateTemplateDetails(true, false);
  }

  // Duplicates the single selected template; the copy gets the placeholder abbreviation.
  private void copyRow() {
    int selected = getSingleSelectedIndex();
    if (selected < 0) return;

    TemplateImpl orTemplate = getTemplate(selected);
    LOG.assertTrue(orTemplate != null);
    TemplateImpl template = orTemplate.copy();
    template.setKey(ABBREVIATION);
    myTemplateOptions.put(template, new HashMap<>(getTemplateOptions(orTemplate)));
    myTemplateContext.put(template, getTemplateContext(orTemplate).createCopy());
    registerTemplate(template);

    updateTemplateDetails(true, false);
  }

  /** Returns the selected row if exactly one row is selected, otherwise -1. */
  private int getSingleSelectedIndex() {
    int[] rows = myTree.getSelectionRows();
    return rows != null && rows.length == 1 ? rows[0] : -1;
  }

  // Removes all selected groups/templates; selects the sibling after the last removed template.
  private void removeRows() {
    TreeNode toSelect = null;

    TreePath[] paths = myTree.getSelectionPaths();
    if (paths == null) return;

    for (TreePath path : paths) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object o = node.getUserObject();
      if (o instanceof TemplateGroup) {
        myTemplateGroups.remove(o);
        removeNodeFromParent(node);
      } else if (o instanceof TemplateImpl) {
        TemplateImpl template = (TemplateImpl)o;
        TemplateGroup templateGroup = getTemplateGroup(template.getGroupName());
        if (templateGroup != null) {
          templateGroup.removeElement(template);
          toSelect = ((DefaultMutableTreeNode)node.getParent()).getChildAfter(node);
          removeNodeFromParent(node);
        }
      }
    }

    if (toSelect instanceof DefaultMutableTreeNode) {
      setSelectedNode((DefaultMutableTreeNode)toSelect);
    }
  }

  /** Builds the checkbox tree with its renderer, selection handling, DnD support, and toolbar. */
  private JPanel createTable() {
    myTreeRoot = new CheckedTreeNode(null);

    myTree = new LiveTemplateTree(new CheckboxTree.CheckboxTreeCellRenderer() {
      @Override
      public void customizeRenderer(final JTree tree,
                                    Object value,
                                    final boolean selected,
                                    final boolean expanded,
                                    final boolean leaf,
                                    final int row,
                                    final boolean hasFocus) {
        if (!(value instanceof DefaultMutableTreeNode)) return;
        value = ((DefaultMutableTreeNode)value).getUserObject();

        if (value instanceof TemplateImpl) {
          TemplateImpl template = (TemplateImpl)value;
          TemplateImpl defaultTemplate = TemplateSettings.getInstance().getDefaultTemplate(template);
          // Templates modified relative to their bundled default are shown in blue.
          Color fgColor = defaultTemplate != null && templatesDiffer(template, defaultTemplate) ? JBColor.BLUE : null;
          getTextRenderer().append(template.getKey(), new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, fgColor));
          String description = template.getDescription();
          if (StringUtil.isNotEmpty(description)) {
            getTextRenderer().append(" (" + description + ")", SimpleTextAttributes.GRAY_ATTRIBUTES);
          }
        }
        else if (value instanceof TemplateGroup) {
          getTextRenderer().append(((TemplateGroup)value).getName(), SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
        }
      }
    }, myTreeRoot, this);
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION);

    myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener(){
      @Override
      public void valueChanged(@NotNull final TreeSelectionEvent e) {
        TemplateSettings templateSettings = TemplateSettings.getInstance();
        TemplateImpl template = getTemplate(getSingleSelectedIndex());
        if (template != null) {
          templateSettings.setLastSelectedTemplate(template.getGroupName(), template.getKey());
        }
        else {
          templateSettings.setLastSelectedTemplate(null, null);
          showEmptyCard();
        }
        if (myUpdateNeeded) {
          // Debounce rapid selection changes before rebuilding the editor.
          myAlarm.cancelAllRequests();
          myAlarm.addRequest(() -> updateTemplateDetails(false, false), 100);
        }
      }
    });

    myTree.registerKeyboardAction(new ActionListener() {
      @Override
      public void actionPerformed(@Nullable ActionEvent event) {
        myCurrentTemplateEditor.focusKey();
      }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), JComponent.WHEN_FOCUSED);

    installPopup();

    // Drag & drop of templates between groups.
    DnDSupport.createBuilder(myTree)
      .setBeanProvider((NullableFunction<DnDActionInfo, DnDDragStartBean>)dnDActionInfo -> {
        Point point = dnDActionInfo.getPoint();
        if (myTree.getPathForLocation(point.x, point.y) == null) return null;

        Map<TemplateImpl, DefaultMutableTreeNode> templates = getSelectedTemplates();

        return !templates.isEmpty() ? new DnDDragStartBean(templates) : null;
      }).
      setDisposableParent(this)
      .setTargetChecker(new DnDTargetChecker() {
        @Override
        public boolean update(DnDEvent event) {
          @SuppressWarnings("unchecked") Set<String> oldGroupNames = getAllGroups((Map<TemplateImpl, DefaultMutableTreeNode>)event.getAttachedObject());
          TemplateGroup group = getDropGroup(event);
          // Drop is only allowed onto a group none of the dragged templates belongs to.
          boolean differentGroup = group != null && !oldGroupNames.contains(group.getName());
          event.setDropPossible(differentGroup, "");
          return true;
        }
      })
      .setDropHandler(new DnDDropHandler() {
        @Override
        public void drop(DnDEvent event) {
          //noinspection unchecked
          moveTemplates((Map<TemplateImpl, DefaultMutableTreeNode>)event.getAttachedObject(),
                        Objects.requireNonNull(getDropGroup(event)).getName());
        }
      })
      .setImageProvider((NullableFunction<DnDActionInfo, DnDImage>)dnDActionInfo -> {
        Point point = dnDActionInfo.getPoint();
        TreePath path = myTree.getPathForLocation(point.x, point.y);
        return path == null ? null : new DnDImage(DnDAwareTree.getDragImage(myTree, path, point).first);
      })
      .install();

    if (myTemplateGroups.size() > 0) {
      myTree.setSelectionInterval(0, 0);
    }

    return initToolbar().createPanel();
  }

  // Shows the "no selection" card with a message matching the selection size.
  private void showEmptyCard() {
    int[] rows = myTree.getSelectionRows();
    boolean multiSelection = rows != null && rows.length > 1;
    myEmptyCardLabel.setText(multiSelection ? CodeInsightBundle.message("templates.list.multiple.live.templates.are.selected")
                                            : CodeInsightBundle.message("templates.list.no.live.templates.are.selected"));
    ((CardLayout) myDetailsPanel.getLayout()).show(myDetailsPanel, NO_SELECTION);
  }

  // True if the templates differ in content, variables, options, or context.
  private boolean templatesDiffer(@NotNull TemplateImpl template, @NotNull TemplateImpl defaultTemplate) {
    template.parseSegments();
    defaultTemplate.parseSegments();
    return !template.equals(defaultTemplate) ||
           !template.getVariables().equals(defaultTemplate.getVariables()) ||
           !areOptionsEqual(template, defaultTemplate) ||
           getTemplateContext(template).getDifference(defaultTemplate.getTemplateContext()) != null;
  }

  // Builds the add/remove/duplicate/restore toolbar attached to the tree.
  private ToolbarDecorator initToolbar() {
    ToolbarDecorator decorator = ToolbarDecorator.createDecorator(myTree)
      .setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          addTemplateOrGroup(button);
        }
      })
      .setRemoveAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton anActionButton) {
          removeRows();
        }
      })
      .disableDownAction()
      .disableUpAction()
      .addExtraAction(new AnActionButton(CodeInsightBundle.lazyMessage("action.AnActionButton.Template.list.text.duplicate"), AllIcons.Actions.Copy) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
          copyRow();
        }

        @Override
        public void updateButton(@NotNull AnActionEvent e) {
          e.getPresentation().setEnabled(getTemplate(getSingleSelectedIndex()) != null);
        }
      }).addExtraAction(new AnActionButton(CodeInsightBundle.lazyMessage("action.AnActionButton.text.restore.deleted.defaults"), AllIcons.Actions.Rollback) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
          TemplateSettings.getInstance().reset();
          reset();
        }

        @Override
        public boolean isEnabled() {
          return super.isEnabled() && !TemplateSettings.getInstance().getDeletedTemplates().isEmpty();
        }
      });
    return decorator.setToolbarPosition(ActionToolbarPosition.RIGHT);
  }

  // Popup under the "+" button offering "new template" or "new group".
  private void addTemplateOrGroup(AnActionButton button) {
    DefaultActionGroup group = new DefaultActionGroup();
    group.add(new DumbAwareAction(IdeBundle.lazyMessage("action.Anonymous.text.live.template")) {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        addTemplate();
      }
    });
    group.add(new DumbAwareAction(IdeBundle.lazyMessage("action.Anonymous.text.template.group")) {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        String newName = Messages
          .showInputDialog(myTree, "Enter the new group name:", "Create New Group", null, "", new TemplateGroupInputValidator(null));
        if (newName != null) {
          TemplateGroup newGroup = new TemplateGroup(newName);
          setSelectedNode(insertNewGroup(newGroup));
        }
      }
    });
    DataContext context = DataManager.getInstance().getDataContext(button.getContextComponent());
    ListPopup popup = JBPopupFactory.getInstance()
      .createActionGroupPopup(null, group, context, JBPopupFactory.ActionSelectionAid.ALPHA_NUMBERING, true, null);
    popup.show(button.getPreferredPopupPoint());
  }

  @Nullable
  private TemplateGroup getDropGroup(DnDEvent event) {
    Point point = event.getPointOn(myTree);
    return getGroup(myTree.getRowForLocation(point.x, point.y));
  }

  // Installs the tree's right-click menu: rename, move, change context, restore defaults, copy/paste.
  private void installPopup() {
    final DumbAwareAction rename = new DumbAwareAction(IdeBundle.lazyMessage("action.Anonymous.text.rename")) {

      @Override
      public void update(@NotNull AnActionEvent e) {
        final TemplateGroup templateGroup = getSingleSelectedGroup();
        boolean enabled = templateGroup != null;
        e.getPresentation().setEnabledAndVisible(enabled);
        super.update(e);
      }

      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        renameGroup();
      }
    };
    rename.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_RENAME).getShortcutSet(), myTree);

    // Submenu rebuilt on each update() so it always lists the current target groups.
    final DefaultActionGroup move = new DefaultActionGroup("Move", true) {
      @Override
      public void update(@NotNull AnActionEvent e) {
        final Map<TemplateImpl, DefaultMutableTreeNode> templates = getSelectedTemplates();
        boolean enabled = !templates.isEmpty();
        e.getPresentation().setEnabledAndVisible(enabled);

        if (enabled) {
          Set<String> oldGroups = getAllGroups(templates);

          removeAll();
          for (TemplateGroup group : getTemplateGroups()) {
            final String newGroupName = group.getName();
            if (!oldGroups.contains(newGroupName)) {
              add(new DumbAwareAction(newGroupName) {
                @Override
                public void actionPerformed(@NotNull AnActionEvent e) {
                  moveTemplates(templates, newGroupName);
                }
              });
            }
          }
          addSeparator();
          add(new DumbAwareAction(IdeBundle.lazyMessage("action.Anonymous.text.new.group")) {
            @Override
            public void actionPerformed(@NotNull AnActionEvent e) {
              String newName = Messages.showInputDialog(myTree, "Enter the new group name:", "Move to a New Group", null, "", new TemplateGroupInputValidator(null));
              if (newName != null) {
                moveTemplates(templates, newName);
              }
            }
          });
        }
      }
    };

    final DumbAwareAction changeContext = new DumbAwareAction(IdeBundle.lazyMessage("action.Anonymous.text.change.context")) {

      @Override
      public void update(@NotNull AnActionEvent e) {
        boolean enabled = !getSelectedTemplates().isEmpty();
        e.getPresentation().setEnabled(enabled);
        super.update(e);
      }

      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        Map<TemplateImpl, DefaultMutableTreeNode> templates = getSelectedTemplates();
        TemplateContext context = new TemplateContext();
        Pair<JPanel, CheckboxTree> pair = LiveTemplateSettingsEditor.createPopupContextPanel(EmptyRunnable.INSTANCE, context);
        DialogBuilder builder = new DialogBuilder(TemplateListPanel.this);
        builder.setCenterPanel(pair.first);
        builder.setPreferredFocusComponent(pair.second);
        builder.setTitle("Change Context Type For Selected Templates");
        int result = builder.show();
        if (result == DialogWrapper.OK_EXIT_CODE) {
          // All selected templates share the single context object built by the dialog.
          for (TemplateImpl template : templates.keySet()) {
            myTemplateContext.put(template, context);
          }
        }
        updateTemplateDetails(false, true);
        myTree.repaint();
      }
    };

    final DumbAwareAction revert = new DumbAwareAction(CodeInsightBundle.lazyMessage("action.DumbAware.TemplateListPanel.text.restore.defaults"),
                                                       CodeInsightBundle.lazyMessage("action.DumbAware.TemplateListPanel.description.restore.default.setting"),
                                                       null) {

      @Override
      public void update(@NotNull AnActionEvent e) {
        boolean enabled = false;
        Map<TemplateImpl, DefaultMutableTreeNode> templates = getSelectedTemplates();
        for (TemplateImpl template : templates.keySet()) {
          TemplateImpl defaultTemplate = TemplateSettings.getInstance().getDefaultTemplate(template);
          if (defaultTemplate != null && templatesDiffer(template, defaultTemplate)) {
            enabled = true;
          }
        }
        e.getPresentation().setEnabledAndVisible(enabled);
        super.update(e);
      }

      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        Map<TemplateImpl, DefaultMutableTreeNode> templates = getSelectedTemplates();
        for (TemplateImpl template : templates.keySet()) {
          TemplateImpl defaultTemplate = TemplateSettings.getInstance().getDefaultTemplate(template);
          if (defaultTemplate != null) {
            myTemplateOptions.put(template, defaultTemplate.createOptions());
            myTemplateContext.put(template, defaultTemplate.createContext());
            template.resetFrom(defaultTemplate);
          }
        }
        updateTemplateDetails(false, true);
        myTree.repaint();
      }
    };

    myTree.addMouseListener(new PopupHandler() {
      @Override
      public void invokePopup(Component comp, int x, int y) {
        final DefaultActionGroup group = new DefaultActionGroup();
        group.add(rename);
        group.add(move);
        group.add(changeContext);
        group.add(revert);
        group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_COPY));
        group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_PASTE));
        ActionManager.getInstance().createActionPopupMenu(ActionPlaces.UNKNOWN, group).getComponent().show(comp, x, y);
      }
    });
  }

  @Nullable
  TemplateGroup getSingleSelectedGroup() {
    return getGroup(getSingleSelectedIndex());
  }

  private static Set<String> getAllGroups(Map<TemplateImpl, DefaultMutableTreeNode> templates) {
    Set<String> oldGroups = new HashSet<>();
    for (TemplateImpl template : templates.keySet()) {
      oldGroups.add(template.getGroupName());
    }
    return oldGroups;
  }

  /**
   * Returns the selected templates mapped to their tree nodes, or an empty map
   * if there is no selection or any selected node is not a template.
   */
  Map<TemplateImpl, DefaultMutableTreeNode> getSelectedTemplates() {
    TreePath[] paths = myTree.getSelectionPaths();
    if (paths == null) {
      return Collections.emptyMap();
    }
    Map<TemplateImpl, DefaultMutableTreeNode> templates = new LinkedHashMap<>();
    for (TreePath path : paths) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object o = node.getUserObject();
      if (!(o instanceof TemplateImpl)) {
        return Collections.emptyMap();
      }
      templates.put((TemplateImpl)o, node);
    }
    return templates;
  }

  // Prompts for a new name for the single selected group and applies it.
  private void renameGroup() {
    final TemplateGroup templateGroup = getSingleSelectedGroup();
    if (templateGroup == null) return;

    final String oldName = templateGroup.getName();
    String newName = Messages.showInputDialog(myTree, "Enter the new group name:", "Rename", null, oldName,
                                              new TemplateGroupInputValidator(oldName));

    if (newName != null && !newName.equals(oldName)) {
      templateGroup.setName(newName);
      ((DefaultTreeModel)myTree.getModel()).nodeChanged(getNode(getSingleSelectedIndex()));
    }
  }

  // Shows the editor card for the current selection, rebuilding the editor when needed.
  private void updateTemplateDetails(boolean focusKey, boolean forceReload) {
    int selected = getSingleSelectedIndex();
    CardLayout layout = (CardLayout)myDetailsPanel.getLayout();
    if (selected < 0 || getTemplate(selected) == null) {
      showEmptyCard();
    }
    else {
      TemplateImpl newTemplate = getTemplate(selected);
      if (myCurrentTemplateEditor == null || forceReload || myCurrentTemplateEditor.getTemplate() != newTemplate) {
        if (myCurrentTemplateEditor != null) {
          myCurrentTemplateEditor.dispose();
        }
        createTemplateEditor(newTemplate, myExpandByDefaultPanel.getSelectedString(), getTemplateOptions(newTemplate),
                             getTemplateContext(newTemplate));
        myCurrentTemplateEditor.resetUi();
        if (focusKey) {
          myCurrentTemplateEditor.focusKey();
        }
      }
      layout.show(myDetailsPanel, TEMPLATE_SETTINGS);
    }
  }

  // Adds the template to its group (creating the group if absent), inserts a tree node
  // at the alphabetically correct position, and selects it.
  private CheckedTreeNode registerTemplate(TemplateImpl template) {
    TemplateGroup newGroup = getTemplateGroup(template.getGroupName());
    if (newGroup == null) {
      newGroup = new TemplateGroup(template.getGroupName());
      insertNewGroup(newGroup);
    }
    newGroup.addElement(template);

    CheckedTreeNode node = new CheckedTreeNode(template);
    node.setChecked(!template.isDeactivated());
    for (DefaultMutableTreeNode child = (DefaultMutableTreeNode)myTreeRoot.getFirstChild();
         child != null;
         child = (DefaultMutableTreeNode)myTreeRoot.getChildAfter(child)) {
      if (((TemplateGroup)child.getUserObject()).getName().equals(template.getGroupName())) {
        int index = getIndexToInsert (child, template.getKey());
        child.insert(node, index);
        ((DefaultTreeModel)myTree.getModel()).nodesWereInserted(child, new int[]{index});
        setSelectedNode(node);
      }
    }
    return node;
  }

  private DefaultMutableTreeNode insertNewGroup(final TemplateGroup newGroup) {
    myTemplateGroups.add(newGroup);

    int index = getIndexToInsert(myTreeRoot, newGroup.getName());
    DefaultMutableTreeNode groupNode = new CheckedTreeNode(newGroup);
    myTreeRoot.insert(groupNode, index);
    ((DefaultTreeModel)myTree.getModel()).nodesWereInserted(myTreeRoot, new int[]{index});
    return groupNode;
  }

  // Returns the child index at which to insert so siblings stay in case-insensitive order.
  private static int getIndexToInsert(DefaultMutableTreeNode parent, String key) {
    if (parent.getChildCount() == 0) return 0;

    int res = 0;
    for (DefaultMutableTreeNode child = (DefaultMutableTreeNode)parent.getFirstChild();
         child != null;
         child = (DefaultMutableTreeNode)parent.getChildAfter(child)) {
      Object o = child.getUserObject();
      String key1 = o instanceof TemplateImpl ? ((TemplateImpl)o).getKey() : ((TemplateGroup)o).getName();
      if (key1.compareToIgnoreCase(key) > 0) return res;
      res++;
    }
    return res;
  }

  private void setSelectedNode(DefaultMutableTreeNode node) {
    TreePath path = new TreePath(node.getPath());
    myTree.expandPath(path.getParentPath());
    int row = myTree.getRowForPath(path);
    myTree.setSelectionRow(row);
    myTree.scrollRowToVisible(row);
  }

  private void removeNodeFromParent(DefaultMutableTreeNode node) {
    TreeNode parent = node.getParent();
    int idx = parent.getIndex(node);
    node.removeFromParent();

    ((DefaultTreeModel)myTree.getModel()).nodesWereRemoved(parent, new int[]{idx}, new TreeNode[]{node});
  }

  // Rebuilds the tree from scratch with copies of the given groups and restores the last selection.
  private void initTemplates(List<? extends TemplateGroup> groups, String lastSelectedGroup, String lastSelectedKey) {
    myTreeRoot.removeAllChildren();
    myTemplateGroups.clear();
    mutatorHelper.clear();
    for (TemplateGroup group : groups) {
      // Edit copies, not the live settings objects; apply() maps them back.
      myTemplateGroups.add(mutatorHelper.copy(group));
    }

    for (TemplateGroup group : myTemplateGroups) {
      CheckedTreeNode groupNode = new CheckedTreeNode(group);
      addTemplateNodes(group, groupNode);
      myTreeRoot.add(groupNode);
    }
    fireStructureChange();

    selectTemplate(lastSelectedGroup, lastSelectedKey);
  }

  void selectNode(@NotNull String searchQuery) {
    Objects.requireNonNull(SpeedSearchSupply.getSupply(myTree, true)).findAndSelectElement(searchQuery);
  }

  // Selects the template (or, when templateKey is null, the group) matching the given names.
  private void selectTemplate(@Nullable final String groupName, @Nullable final String templateKey) {
    TreeUtil.traverseDepth(myTreeRoot, node -> {
      Object o = ((DefaultMutableTreeNode)node).getUserObject();
      if (templateKey == null && o instanceof TemplateGroup && Comparing.equal(groupName, ((TemplateGroup)o).getName()) ||
          o instanceof TemplateImpl && Comparing.equal(templateKey, ((TemplateImpl)o).getKey()) && Comparing.equal(groupName, ((TemplateImpl)o).getGroupName())) {
        setSelectedNode((DefaultMutableTreeNode)node);
        return false;
      }
      return true;
    });
  }

  private void fireStructureChange() {
    ((DefaultTreeModel)myTree.getModel()).nodeStructureChanged(myTreeRoot);
  }

  // Adds one checked node per template, sorted, and seeds the per-template edit maps.
  private void addTemplateNodes(TemplateGroup group, CheckedTreeNode groupNode) {
    List<TemplateImpl> templates = new ArrayList<>(group.getElements());
    Collections.sort(templates, TEMPLATE_COMPARATOR);
    for (final TemplateImpl template : templates) {
      myTemplateOptions.put(template, template.createOptions());
      myTemplateContext.put(template, template.createContext());
      CheckedTreeNode node = new CheckedTreeNode(template);
      node.setChecked(!template.isDeactivated());
      groupNode.add(node);
    }
  }

  // Accepts non-empty group names that are either unused or equal to the name being edited.
  private class TemplateGroupInputValidator implements InputValidator {
    private final String myOldName;

    TemplateGroupInputValidator(String oldName) {
      myOldName = oldName;
    }

    @Override
    public boolean checkInput(String inputString) {
      return StringUtil.isNotEmpty(inputString) &&
             (getTemplateGroup(inputString) == null || inputString.equals(myOldName));
    }

    @Override
    public boolean canClose(String inputString) {
      return checkInput(inputString);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode.web.webhdfs; import com.google.common.base.Preconditions; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.DefaultFullHttpResponse; import io.netty.handler.codec.http.DefaultHttpResponse; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.codec.http.QueryStringDecoder; import io.netty.handler.stream.ChunkedStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum; import org.apache.hadoop.fs.permission.FsCreateModes; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; 
import org.apache.hadoop.hdfs.web.JsonUtil; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.AclPermissionParam; import org.apache.hadoop.hdfs.web.resources.GetOpParam; import org.apache.hadoop.hdfs.web.resources.PostOpParam; import org.apache.hadoop.hdfs.web.resources.PutOpParam; import org.apache.hadoop.hdfs.web.resources.UserParam; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.LimitInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.util.EnumSet; import static io.netty.handler.codec.http.HttpHeaders.Names.ACCEPT; import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS; import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS; import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_ORIGIN; import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_MAX_AGE; import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION; import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH; import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE; import static io.netty.handler.codec.http.HttpHeaders.Names.LOCATION; import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE; import static io.netty.handler.codec.http.HttpHeaders.Values.KEEP_ALIVE; import static io.netty.handler.codec.http.HttpMethod.GET; import static io.netty.handler.codec.http.HttpMethod.OPTIONS; import static io.netty.handler.codec.http.HttpMethod.POST; import static io.netty.handler.codec.http.HttpMethod.PUT; import static 
io.netty.handler.codec.http.HttpResponseStatus.CONTINUE;
import static io.netty.handler.codec.http.HttpResponseStatus.CREATED;
import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import static org.apache.hadoop.hdfs.protocol.HdfsConstants.HDFS_URI_SCHEME;
import static org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier.HDFS_DELEGATION_KIND;

/**
 * Netty inbound handler implementing the DataNode side of WebHDFS: CREATE,
 * APPEND, OPEN and GETFILECHECKSUM, plus the CORS preflight for CREATE.
 * One instance handles one request; per-request state (path, params, ugi,
 * resp) is populated in {@link #channelRead0}.
 */
public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
  static final Log LOG = LogFactory.getLog(WebHdfsHandler.class);
  static final Log REQLOG = LogFactory.getLog("datanode.webhdfs");
  public static final String WEBHDFS_PREFIX = WebHdfsFileSystem.PATH_PREFIX;
  public static final int WEBHDFS_PREFIX_LENGTH = WEBHDFS_PREFIX.length();
  public static final String APPLICATION_OCTET_STREAM =
      "application/octet-stream";
  public static final String APPLICATION_JSON_UTF8 =
      "application/json; charset=utf-8";
  public static final EnumSet<CreateFlag> EMPTY_CREATE_FLAG =
      EnumSet.noneOf(CreateFlag.class);

  private final Configuration conf;
  private final Configuration confForCreate;

  // Per-request state, filled in by channelRead0 before dispatching.
  private String path;
  private ParameterParser params;
  private UserGroupInformation ugi;
  // Last response produced; null until a handler ran. Used for request logging.
  private DefaultHttpResponse resp = null;

  public WebHdfsHandler(Configuration conf, Configuration confForCreate)
      throws IOException {
    this.conf = conf;
    this.confForCreate = confForCreate;
    // Set user/ACL name patterns from configuration before any request parsing.
    UserParam.setUserPattern(
        conf.get(HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
            HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
    AclPermissionParam.setAclPermissionPattern(
        conf.get(HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_KEY,
            HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT));
  }

  /**
   * Parses the request, resolves the caller's UGI (injecting the delegation
   * token when security is on), and runs {@link #handle} as that user.
   * The request line and response code are always written to the request log.
   */
  @Override
  public void channelRead0(final ChannelHandlerContext ctx,
                           final HttpRequest req) throws Exception {
    Preconditions.checkArgument(req.getUri().startsWith(WEBHDFS_PREFIX));
    QueryStringDecoder queryString = new QueryStringDecoder(req.getUri());
    params = new ParameterParser(queryString, conf);
    DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
    ugi = ugiProvider.ugi();
    path = params.path();

    injectToken();
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try {
          handle(ctx, req);
        } finally {
          // Request log: remote host, method, uri and answered status.
          String host = null;
          try {
            host = ((InetSocketAddress) ctx.channel().remoteAddress())
                .getAddress().getHostAddress();
          } catch (Exception e) {
            LOG.warn("Error retrieving hostname: ", e);
            host = "unknown";
          }
          REQLOG.info(host + " " + req.getMethod() + " " + req.getUri() + " "
              + getResponseCode());
        }
        return null;
      }
    });
  }

  /** Status code for the request log; 500 if no handler produced a response. */
  int getResponseCode() {
    return (resp == null) ? INTERNAL_SERVER_ERROR.code()
        : resp.getStatus().code();
  }

  /**
   * Dispatches on (op, HTTP method). Unknown combinations raise
   * IllegalArgumentException, which exceptionCaught turns into an error reply.
   */
  public void handle(ChannelHandlerContext ctx, HttpRequest req)
      throws IOException, URISyntaxException {
    String op = params.op();
    HttpMethod method = req.getMethod();
    if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op) && method == PUT) {
      onCreate(ctx);
    } else if (PostOpParam.Op.APPEND.name().equalsIgnoreCase(op)
        && method == POST) {
      onAppend(ctx);
    } else if (GetOpParam.Op.OPEN.name().equalsIgnoreCase(op)
        && method == GET) {
      onOpen(ctx);
    } else if (GetOpParam.Op.GETFILECHECKSUM.name().equalsIgnoreCase(op)
        && method == GET) {
      onGetFileChecksum(ctx);
    } else if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
        && method == OPTIONS) {
      allowCORSOnCreate(ctx);
    } else {
      throw new IllegalArgumentException("Invalid operation " + op);
    }
  }

  /** Maps the failure to an HTTP error response and closes the connection. */
  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
    LOG.debug("Error ", cause);
    resp = ExceptionHandler.exceptionCaught(cause);
    resp.headers().set(CONNECTION, CLOSE);
    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
  }

  /**
   * PUT op=CREATE: creates the file and swaps this handler for an HdfsWriter
   * that streams the request body into it. Replies 201 with the hdfs:// URI.
   */
  private void onCreate(ChannelHandlerContext ctx)
      throws IOException, URISyntaxException {
    writeContinueHeader(ctx);

    final String nnId = params.namenodeId();
    final int bufferSize = params.bufferSize();
    final short replication = params.replication();
    final long blockSize = params.blockSize();
    final FsPermission unmaskedPermission = params.unmaskedPermission();
    final FsPermission permission = unmaskedPermission == null ?
        params.permission() :
        FsCreateModes.create(params.permission(), unmaskedPermission);
    final boolean createParent = params.createParent();

    EnumSet<CreateFlag> flags = params.createFlag();
    if (flags.equals(EMPTY_CREATE_FLAG)) {
      // No explicit createflag parameter: derive it from overwrite.
      flags = params.overwrite()
          ? EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE)
          : EnumSet.of(CreateFlag.CREATE);
    } else {
      if (params.overwrite()) {
        flags.add(CreateFlag.OVERWRITE);
      }
    }

    final DFSClient dfsClient = newDfsClient(nnId, confForCreate);
    OutputStream out = null;
    try {
      out = dfsClient.createWrappedOutputStream(dfsClient.create(
          path, permission, flags, createParent, replication, blockSize, null,
          bufferSize, null), null);
    } catch (IOException e) {
      // FIX: don't leak the DFSClient when create() fails; the exception still
      // propagates to exceptionCaught for the error response.
      IOUtils.cleanup(LOG, out, dfsClient);
      throw e;
    }
    resp = new DefaultHttpResponse(HTTP_1_1, CREATED);

    final URI uri = new URI(HDFS_URI_SCHEME, nnId, path, null, null);
    resp.headers().set(LOCATION, uri.toString());
    resp.headers().set(CONTENT_LENGTH, 0);
    resp.headers().set(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    // Hand the rest of the body over to HdfsWriter in the pipeline.
    ctx.pipeline().replace(this, HdfsWriter.class.getSimpleName(),
        new HdfsWriter(dfsClient, out, resp));
  }

  /**
   * POST op=APPEND: opens the file for append and swaps this handler for an
   * HdfsWriter that streams the request body onto the end of the file.
   */
  private void onAppend(ChannelHandlerContext ctx) throws IOException {
    writeContinueHeader(ctx);
    final String nnId = params.namenodeId();
    final int bufferSize = params.bufferSize();

    DFSClient dfsClient = newDfsClient(nnId, conf);
    OutputStream out = null;
    try {
      out = dfsClient.append(path, bufferSize,
          EnumSet.of(CreateFlag.APPEND), null, null);
    } catch (IOException e) {
      // FIX: release the client if append() fails (same pattern as onCreate).
      IOUtils.cleanup(LOG, out, dfsClient);
      throw e;
    }
    resp = new DefaultHttpResponse(HTTP_1_1, OK);
    resp.headers().set(CONTENT_LENGTH, 0);
    ctx.pipeline().replace(this, HdfsWriter.class.getSimpleName(),
        new HdfsWriter(dfsClient, out, resp));
  }

  /**
   * GET op=OPEN: streams [offset, offset+length) of the file as
   * application/octet-stream. When the visible length is known the reply
   * carries Content-Length and the stream is clamped with LimitInputStream.
   * The DFSClient is closed when the chunked stream is closed.
   */
  private void onOpen(ChannelHandlerContext ctx) throws IOException {
    final String nnId = params.namenodeId();
    final int bufferSize = params.bufferSize();
    final long offset = params.offset();
    final long length = params.length();

    resp = new DefaultHttpResponse(HTTP_1_1, OK);
    HttpHeaders headers = resp.headers();
    // Allow the UI to access the file
    headers.set(ACCESS_CONTROL_ALLOW_METHODS, GET);
    headers.set(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    headers.set(CONTENT_TYPE, APPLICATION_OCTET_STREAM);
    headers.set(CONNECTION, CLOSE);

    final DFSClient dfsclient = newDfsClient(nnId, conf);
    HdfsDataInputStream in = null;
    try {
      in = dfsclient.createWrappedInputStream(
          dfsclient.open(path, bufferSize, true));
      in.seek(offset);
    } catch (IOException e) {
      // FIX: don't leak the client (and a half-open stream) when open/seek
      // fails; the exception still reaches exceptionCaught for the reply.
      IOUtils.cleanup(LOG, in, dfsclient);
      throw e;
    }

    long contentLength = in.getVisibleLength() - offset;
    if (length >= 0) {
      contentLength = Math.min(contentLength, length);
    }
    final InputStream data;
    if (contentLength >= 0) {
      headers.set(CONTENT_LENGTH, contentLength);
      data = new LimitInputStream(in, contentLength);
    } else {
      data = in;
    }

    ctx.write(resp);
    ctx.writeAndFlush(new ChunkedStream(data) {
      @Override
      public void close() throws Exception {
        super.close();
        dfsclient.close();
      }
    }).addListener(ChannelFutureListener.CLOSE);
  }

  /** GET op=GETFILECHECKSUM: replies with the MD5-of-MD5-of-CRC32 checksum as JSON. */
  private void onGetFileChecksum(ChannelHandlerContext ctx) throws IOException {
    MD5MD5CRC32FileChecksum checksum = null;
    final String nnId = params.namenodeId();
    DFSClient dfsclient = newDfsClient(nnId, conf);
    try {
      checksum = dfsclient.getFileChecksum(path, Long.MAX_VALUE);
      dfsclient.close();
      dfsclient = null; // closed cleanly; nothing left for the finally block
    } finally {
      IOUtils.cleanup(LOG, dfsclient);
    }
    final byte[] js =
        JsonUtil.toJsonString(checksum).getBytes(StandardCharsets.UTF_8);
    resp = new DefaultFullHttpResponse(HTTP_1_1, OK, Unpooled.wrappedBuffer(js));

    resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
    resp.headers().set(CONTENT_LENGTH, js.length);
    resp.headers().set(CONNECTION, CLOSE);
    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
  }

  /** Accept preflighted CORS requests for CREATE (OPTIONS with CORS headers). */
  private void allowCORSOnCreate(ChannelHandlerContext ctx)
      throws IOException, URISyntaxException {
    resp = new DefaultHttpResponse(HTTP_1_1, OK);
    HttpHeaders headers = resp.headers();
    headers.set(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    headers.set(ACCESS_CONTROL_ALLOW_HEADERS, ACCEPT);
    headers.set(ACCESS_CONTROL_ALLOW_METHODS, PUT);
    headers.set(ACCESS_CONTROL_MAX_AGE, 1728000);
    headers.set(CONTENT_LENGTH, 0);
    headers.set(CONNECTION, KEEP_ALIVE);

    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
  }

  /** Sends "100 Continue" so clients waiting on Expect: 100-continue start the upload. */
  private static void writeContinueHeader(ChannelHandlerContext ctx) {
    DefaultHttpResponse r = new DefaultFullHttpResponse(HTTP_1_1, CONTINUE,
        Unpooled.EMPTY_BUFFER);
    ctx.writeAndFlush(r);
  }

  /** Builds a DFSClient pointed at the given NameNode id. */
  private static DFSClient newDfsClient(String nnId, Configuration conf)
      throws IOException {
    URI uri = URI.create(HDFS_URI_SCHEME + "://" + nnId);
    return new DFSClient(uri, conf);
  }

  /** In secure clusters, adds the request's delegation token to the UGI. */
  private void injectToken() throws IOException {
    if (UserGroupInformation.isSecurityEnabled()) {
      Token<DelegationTokenIdentifier> token = params.delegationToken();
      token.setKind(HDFS_DELEGATION_KIND);
      ugi.addToken(token);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.process.traversal; import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalOptionParent; import org.apache.tinkerpop.gremlin.process.traversal.strategy.TraversalStrategyProxy; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.util.function.Lambda; import java.sql.Timestamp; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.function.BiFunction; /** * A Translator will translate {@link Bytecode} into another representation. That representation may be a * Java instance via {@link StepTranslator} or a String script in some language via {@link ScriptTranslator}. * The parameterization of Translator is S (traversal source) and T (full translation). * * @author Marko A. 
Rodriguez (http://markorodriguez.com)
 * @author Stark Arya (sandszhou.zj@alibaba-inc.com)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public interface Translator<S, T> {

  /**
   * Get the {@link TraversalSource} representation rooting this translator.
   * For string-based translators ({@link ScriptTranslator}) this is typically "g";
   * for java-based translators ({@link StepTranslator}) it is the actual
   * {@link TraversalSource} the {@link Traversal} will be built from.
   *
   * @return the traversal source representation
   */
  public S getTraversalSource();

  /**
   * Translate {@link Bytecode} into a new representation — typically a script
   * string for language translations.
   *
   * @param bytecode the bytecode representing traversal source and traversal manipulations
   * @return the translated object
   */
  public T translate(final Bytecode bytecode);

  /** Translates a {@link Traversal} by translating its underlying bytecode. */
  public default T translate(final Traversal<?,?> t) {
    return translate(t.asAdmin().getBytecode());
  }

  /**
   * Get the language that the translator is converting the traversal bytecode to.
   *
   * @return the language of the translation
   */
  public String getTargetLanguage();

  /**
   * Translates bytecode to a {@link Script} representation.
   */
  public interface ScriptTranslator extends Translator<String, Script> {

    /**
     * Converts individual data types to their string representation in the
     * target language; applied as (traversalSource, object) -> Script.
     */
    public interface TypeTranslator extends BiFunction<String, Object, Script> {
    }

    /**
     * Base {@link TypeTranslator}: owns a reusable {@link Script} buffer and a
     * {@code withParameters} flag. When {@code withParameters} is true, values
     * are bound as script parameters rather than inlined as literals.
     */
    public abstract class AbstractTypeTranslator implements ScriptTranslator.TypeTranslator {

      protected static final String ANONYMOUS_TRAVERSAL_PREFIX = "__";
      protected final boolean withParameters;
      // Shared output buffer, reset on every apply() call.
      protected final Script script;

      protected AbstractTypeTranslator(final boolean withParameters) {
        this.withParameters = withParameters;
        this.script = new Script();
      }

      /**
       * Entry point: resets the script buffer, then renders either a full
       * traversal (Bytecode) or a single value.
       */
      @Override
      public Script apply(final String traversalSource, final Object o) {
        this.script.init();
        if (o instanceof Bytecode) {
          return produceScript(traversalSource, (Bytecode) o);
        } else {
          return convertToScript(o);
        }
      }

      /**
       * Gets the syntax for the spawn of an anonymous traversal, traditionally
       * the double underscore.
       */
      protected String getAnonymousTraversalPrefix() {
        return ANONYMOUS_TRAVERSAL_PREFIX;
      }

      /** Gets the syntax for a {@code null} value as a string representation. */
      protected abstract String getNullSyntax();

      /**
       * Take the string argument and convert it to a string representation in
       * the target language (i.e. escape, enclose in appropriate quotes, etc.)
       */
      protected abstract String getSyntax(final String o);

      /** Take the boolean argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final Boolean o);

      /** Take the {@code Date} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final Date o);

      /** Take the {@code Timestamp} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final Timestamp o);

      /** Take the {@code UUID} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final UUID o);

      /** Take the {@link Lambda} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final Lambda o);

      /** Take the {@link SackFunctions.Barrier} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final SackFunctions.Barrier o);

      /** Take the {@link VertexProperty.Cardinality} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final VertexProperty.Cardinality o);

      /** Take the {@link TraversalOptionParent.Pick} argument and convert it to the target-language representation. */
      protected abstract String getSyntax(final TraversalOptionParent.Pick o);

      /**
       * Take the numeric argument and convert it to the target-language
       * representation. Languages that discern number types should further
       * check the concrete {@code Number} subclass.
       */
      protected abstract String getSyntax(final Number o);

      /** Take the {@code Set} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Set<?> o);

      /** Take the {@code List} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final List<?> o);

      /** Take the {@code Map} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Map<?,?> o);

      /** Take the {@code Class} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Class<?> o);

      /** Take the {@code Enum} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Enum<?> o);

      /** Take the {@link Vertex} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Vertex o);

      /** Take the {@link Edge} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final Edge o);

      /** Take the {@link VertexProperty} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final VertexProperty<?> o);

      /** Take the {@link TraversalStrategyProxy} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final TraversalStrategyProxy<?> o);

      /** Take the {@link Bytecode} and write the syntax directly to the member {@link #script} variable. */
      protected abstract Script produceScript(final String traversalSource, final Bytecode o);

      /**
       * Take the {@link P} and write the syntax directly to the member
       * {@link #script} variable. Implementations should also consider {@link TextP}.
       */
      protected abstract Script produceScript(final P<?> p);

      /**
       * Renders an arbitrary argument. If {@code withParameters} is set, scalar
       * values (String, Boolean, Number, Timestamp, Date, UUID, Lambda, Binding
       * values and "other") are bound as parameters; container/structural types
       * (Bytecode, Traversal, Set, List, Map, P, graph elements, strategies)
       * recurse into produceScript/convertToScript, and enum-like types
       * (Barrier, Cardinality, Pick, Enum) are emitted as syntax.
       *
       * NOTE(review): the instanceof order is significant — e.g. Timestamp must
       * precede Date (Timestamp extends Date) and TraversalStrategyProxy must
       * precede TraversalStrategy. Do not reorder.
       *
       * @param object the argument to render
       * @return the script with the rendered representation appended
       */
      protected Script convertToScript(final Object object) {
        if (object instanceof Bytecode.Binding) {
          return script.getBoundKeyOrAssign(withParameters, ((Bytecode.Binding) object).variable());
        } else if (object instanceof Bytecode) {
          return produceScript(getAnonymousTraversalPrefix(), (Bytecode) object);
        } else if (object instanceof Traversal) {
          return convertToScript(((Traversal) object).asAdmin().getBytecode());
        } else if (object instanceof String) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((String) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof Boolean) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((Boolean) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof Set) {
          return produceScript((Set<?>) object);
        } else if (object instanceof List) {
          return produceScript((List<?>) object);
        } else if (object instanceof Map) {
          return produceScript((Map<?, ?>) object);
        } else if (object instanceof Number) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((Number) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof Class) {
          return produceScript((Class<?>) object);
        } else if (object instanceof Timestamp) {
          // Timestamp checked before Date on purpose (subtype).
          final Object objectOrWrapper = withParameters ? object : getSyntax((Timestamp) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof Date) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((Date) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof UUID) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((UUID) object);
          return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
        } else if (object instanceof P) {
          return produceScript((P<?>) object);
        } else if (object instanceof SackFunctions.Barrier) {
          return script.append(getSyntax((SackFunctions.Barrier) object));
        } else if (object instanceof VertexProperty.Cardinality) {
          return script.append(getSyntax((VertexProperty.Cardinality) object));
        } else if (object instanceof TraversalOptionParent.Pick) {
          return script.append(getSyntax((TraversalOptionParent.Pick) object));
        } else if (object instanceof Enum) {
          return produceScript((Enum<?>) object);
        } else if (object instanceof Vertex) {
          return produceScript((Vertex) object);
        } else if (object instanceof Edge) {
          return produceScript((Edge) object);
        } else if (object instanceof VertexProperty) {
          return produceScript((VertexProperty<?>) object);
        } else if (object instanceof Lambda) {
          final Object objectOrWrapper = withParameters ? object : getSyntax((Lambda) object);
          return script.getBoundKeyOrAssign(withParameters,objectOrWrapper);
        } else if (object instanceof TraversalStrategyProxy) {
          return produceScript((TraversalStrategyProxy<?>) object);
        } else if (object instanceof TraversalStrategy) {
          return convertToScript(new TraversalStrategyProxy(((TraversalStrategy) object)));
        } else {
          // Fallback: null gets the null syntax; anything else is bound/inlined as-is.
          return null == object ? script.append(getNullSyntax()) : script.getBoundKeyOrAssign(withParameters, object);
        }
      }
    }
  }

  /**
   * Translates bytecode to actual steps.
   */
  public interface StepTranslator<S extends TraversalSource, T extends Traversal.Admin<?, ?>> extends Translator<S, T> {
  }
}
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.interestrate.swaption.provider;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed;
import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionPhysicalFixedIbor;
import com.opengamma.analytics.financial.model.interestrate.HullWhiteOneFactorPiecewiseConstantInterestRateModel;
import com.opengamma.analytics.financial.provider.calculator.discounting.CashFlowEquivalentCalculator;
import com.opengamma.analytics.financial.provider.calculator.discounting.CashFlowEquivalentCurveSensitivityCalculator;
import com.opengamma.analytics.financial.provider.description.interestrate.HullWhiteOneFactorProviderInterface;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MulticurveSensitivity;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyMulticurveSensitivity;
import com.opengamma.analytics.math.statistics.distribution.NormalDistribution;
import com.opengamma.analytics.math.statistics.distribution.ProbabilityDistribution;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.MultipleCurrencyAmount;
import com.opengamma.util.tuple.DoublesPair;

/**
 * Method to compute the present value and sensitivities of physical delivery European
 * swaptions with the Hull-White one factor model.
 * Reference: Henrard, M. (2003). Explicit bond option and swaption formula in
 * Heath-Jarrow-Morton one-factor model. International Journal of Theoretical and
 * Applied Finance, 6(1):57--72.
 */
public final class SwaptionPhysicalFixedIborHullWhiteMethod {

  /**
   * The method unique instance.
   */
  private static final SwaptionPhysicalFixedIborHullWhiteMethod INSTANCE = new SwaptionPhysicalFixedIborHullWhiteMethod();

  /**
   * Return the unique instance of the class.
   * @return The instance.
   */
  public static SwaptionPhysicalFixedIborHullWhiteMethod getInstance() {
    return INSTANCE;
  }

  /**
   * Private constructor (singleton).
   */
  private SwaptionPhysicalFixedIborHullWhiteMethod() {
  }

  /**
   * The model used in computations.
   */
  private static final HullWhiteOneFactorPiecewiseConstantInterestRateModel MODEL = new HullWhiteOneFactorPiecewiseConstantInterestRateModel();
  /**
   * The cash flow equivalent calculator used in computations.
   */
  private static final CashFlowEquivalentCalculator CFEC = CashFlowEquivalentCalculator.getInstance();
  /**
   * The cash flow equivalent curve sensitivity calculator used in computations.
   */
  private static final CashFlowEquivalentCurveSensitivityCalculator CFECSC = CashFlowEquivalentCurveSensitivityCalculator.getInstance();
  /**
   * The normal distribution implementation.
   */
  private static final ProbabilityDistribution<Double> NORMAL = new NormalDistribution(0, 1);

  /**
   * Computes the present value of the physical delivery swaption by first
   * reducing the underlying swap to its fixed cash flow equivalent.
   * @param swaption The swaption.
   * @param hullWhite The Hull-White parameters and the curves.
   * @return The present value.
   */
  public MultipleCurrencyAmount presentValue(final SwaptionPhysicalFixedIbor swaption, final HullWhiteOneFactorProviderInterface hullWhite) {
    ArgumentChecker.notNull(swaption, "Swaption");
    final AnnuityPaymentFixed cfe = swaption.getUnderlyingSwap().accept(CFEC, hullWhite.getMulticurveProvider());
    return presentValue(swaption, cfe, hullWhite);
  }

  /**
   * Computes the present value of the physical delivery swaption from an
   * already-computed cash flow equivalent, using the explicit Henrard (2003)
   * bond-option formula: each discounted cash flow is weighted by
   * N(omega * (kappa + alpha_i)).
   * @param swaption The swaption.
   * @param cfe The swaption cash flow equivalent.
   * @param hullWhite The Hull-White parameters and the curves.
   * @return The present value.
   */
  public MultipleCurrencyAmount presentValue(final SwaptionPhysicalFixedIbor swaption, final AnnuityPaymentFixed cfe, final HullWhiteOneFactorProviderInterface hullWhite) {
    ArgumentChecker.notNull(swaption, "Swaption");
    ArgumentChecker.notNull(hullWhite, "Hull-White provider");
    final double expiryTime = swaption.getTimeToExpiry();
    final double[] alpha = new double[cfe.getNumberOfPayments()];
    final double[] df = new double[cfe.getNumberOfPayments()];
    final double[] discountedCashFlow = new double[cfe.getNumberOfPayments()];
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      alpha[loopcf] = MODEL.alpha(hullWhite.getHullWhiteParameters(), 0.0, expiryTime, expiryTime, cfe.getNthPayment(loopcf).getPaymentTime());
      df[loopcf] = hullWhite.getMulticurveProvider().getDiscountFactor(swaption.getCurrency(), cfe.getNthPayment(loopcf).getPaymentTime());
      discountedCashFlow[loopcf] = df[loopcf] * cfe.getNthPayment(loopcf).getAmount();
    }
    // kappa: the exercise boundary in the Hull-White state variable.
    final double kappa = MODEL.kappa(discountedCashFlow, alpha);
    // omega: -1 for payer swaptions, +1 for receiver swaptions.
    final double omega = (swaption.getUnderlyingSwap().getFixedLeg().isPayer() ? -1.0 : 1.0);
    double pv = 0.0;
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      pv += discountedCashFlow[loopcf] * NORMAL.getCDF(omega * (kappa + alpha[loopcf]));
    }
    // Long positions keep the sign; short positions are negated.
    return MultipleCurrencyAmount.of(swaption.getUnderlyingSwap().getFirstLeg().getCurrency(), pv * (swaption.isLong() ? 1.0 : -1.0));
  }

  /**
   * Present value sensitivity to Hull-White volatility parameters, computed by
   * algorithmic differentiation (forward sweep for alpha and its derivatives,
   * backward sweep to accumulate sigma-bar).
   * @param swaption The physical delivery swaption.
   * @param hullWhite The Hull-White parameters and the curves.
   * @return The present value Hull-White parameters sensitivity.
   */
  public double[] presentValueHullWhiteSensitivity(final SwaptionPhysicalFixedIbor swaption, final HullWhiteOneFactorProviderInterface hullWhite) {
    ArgumentChecker.notNull(swaption, "Swaption");
    ArgumentChecker.notNull(hullWhite, "Hull-White provider");
    final int nbSigma = hullWhite.getHullWhiteParameters().getVolatility().length;
    final double[] sigmaBar = new double[nbSigma];
    final AnnuityPaymentFixed cfe = swaption.getUnderlyingSwap().accept(CFEC, hullWhite.getMulticurveProvider());
    // Forward sweep: alpha and its derivatives w.r.t. each volatility bucket.
    final double expiryTime = swaption.getTimeToExpiry();
    final double[] alpha = new double[cfe.getNumberOfPayments()];
    final double[][] alphaDerivatives = new double[cfe.getNumberOfPayments()][nbSigma];
    final double[] df = new double[cfe.getNumberOfPayments()];
    final double[] discountedCashFlow = new double[cfe.getNumberOfPayments()];
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      alpha[loopcf] = MODEL.alpha(hullWhite.getHullWhiteParameters(), 0.0, expiryTime, expiryTime, cfe.getNthPayment(loopcf).getPaymentTime(), alphaDerivatives[loopcf]);
      df[loopcf] = hullWhite.getMulticurveProvider().getDiscountFactor(swaption.getCurrency(), cfe.getNthPayment(loopcf).getPaymentTime());
      discountedCashFlow[loopcf] = df[loopcf] * cfe.getNthPayment(loopcf).getAmount();
    }
    final double kappa = MODEL.kappa(discountedCashFlow, alpha);
    final double omega = (swaption.getUnderlyingSwap().getFixedLeg().isPayer() ? -1.0 : 1.0);
    // Backward sweep: pvBar = 1, chain rule through the normal CDF.
    final double pvBar = 1.0;
    final double[] alphaBar = new double[cfe.getNumberOfPayments()];
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      alphaBar[loopcf] = discountedCashFlow[loopcf] * NORMAL.getPDF(omega * (kappa + alpha[loopcf])) * omega * pvBar;
    }
    for (int loopsigma = 0; loopsigma < nbSigma; loopsigma++) {
      for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
        sigmaBar[loopsigma] += alphaDerivatives[loopcf][loopsigma] * alphaBar[loopcf];
      }
    }
    if (!swaption.isLong()) {
      // Short position: flip the sign of all sensitivities.
      for (int loopsigma = 0; loopsigma < nbSigma; loopsigma++) {
        sigmaBar[loopsigma] *= -1.0;
      }
    }
    return sigmaBar;
  }

  /**
   * Present value sensitivity to the curves, computed with the explicit formula
   * by algorithmic differentiation (discount-factor leg plus cash-flow-equivalent
   * sensitivity leg).
   * NOTE(review): this method continues past the end of this chunk; the trailing
   * code below is intentionally left open.
   * @param swaption The physical delivery swaption.
   * @param hullWhite The Hull-White parameters and the curves.
   * @return The present value curve sensitivity.
   */
  public MultipleCurrencyMulticurveSensitivity presentValueCurveSensitivity(final SwaptionPhysicalFixedIbor swaption, final HullWhiteOneFactorProviderInterface hullWhite) {
    ArgumentChecker.notNull(swaption, "Swaption");
    ArgumentChecker.notNull(hullWhite, "Hull-White provider");
    final Currency ccy = swaption.getCurrency();
    final int nbSigma = hullWhite.getHullWhiteParameters().getVolatility().length;
    final AnnuityPaymentFixed cfe = swaption.getUnderlyingSwap().accept(CFEC, hullWhite.getMulticurveProvider());
    // Forward sweep (same structure as presentValueHullWhiteSensitivity).
    final double expiryTime = swaption.getTimeToExpiry();
    final double[] alpha = new double[cfe.getNumberOfPayments()];
    final double[][] alphaDerivatives = new double[cfe.getNumberOfPayments()][nbSigma];
    final double[] df = new double[cfe.getNumberOfPayments()];
    final double[] discountedCashFlow = new double[cfe.getNumberOfPayments()];
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      alpha[loopcf] = MODEL.alpha(hullWhite.getHullWhiteParameters(), 0.0, expiryTime, expiryTime, cfe.getNthPayment(loopcf).getPaymentTime(), alphaDerivatives[loopcf]);
      df[loopcf] = hullWhite.getMulticurveProvider().getDiscountFactor(swaption.getCurrency(), cfe.getNthPayment(loopcf).getPaymentTime());
      discountedCashFlow[loopcf] = df[loopcf] * cfe.getNthPayment(loopcf).getAmount();
    }
    final double kappa = MODEL.kappa(discountedCashFlow, alpha);
    final double omega = (swaption.getUnderlyingSwap().getFixedLeg().isPayer() ? -1.0 : 1.0);
    final double[] ncdf = new double[cfe.getNumberOfPayments()];
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      ncdf[loopcf] = NORMAL.getCDF(omega * (kappa + alpha[loopcf]));
    }
    // Backward sweep: discount factor sensitivities for each cash flow time.
    final double pvBar = 1.0;
    final double[] discountedCashFlowBar = new double[cfe.getNumberOfPayments()];
    final double[] dfBar = new double[cfe.getNumberOfPayments()];
    final double[] cfeAmountBar = new double[cfe.getNumberOfPayments()];
    final List<DoublesPair> listDfSensi = new ArrayList<>();
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      discountedCashFlowBar[loopcf] = ncdf[loopcf] * pvBar;
      dfBar[loopcf] = cfe.getNthPayment(loopcf).getAmount() * discountedCashFlowBar[loopcf];
      cfeAmountBar[loopcf] = df[loopcf] * discountedCashFlowBar[loopcf];
      final DoublesPair dfSensi = DoublesPair.of(cfe.getNthPayment(loopcf).getPaymentTime(), -cfe.getNthPayment(loopcf).getPaymentTime() * df[loopcf] * dfBar[loopcf]);
      listDfSensi.add(dfSensi);
    }
    final Map<String, List<DoublesPair>> pvsDF = new HashMap<>();
    pvsDF.put(hullWhite.getMulticurveProvider().getName(ccy), listDfSensi);
    MulticurveSensitivity sensitivity = MulticurveSensitivity.ofYieldDiscounting(pvsDF);
    // Add the sensitivity coming from the cash flow equivalent amounts themselves.
    final Map<Double, MulticurveSensitivity> cfeCurveSensi = swaption.getUnderlyingSwap().accept(CFECSC, hullWhite.getMulticurveProvider());
    for (int loopcf = 0; loopcf < cfe.getNumberOfPayments(); loopcf++) {
      final MulticurveSensitivity sensiCfe = cfeCurveSensi.get(cfe.getNthPayment(loopcf).getPaymentTime());
      if (!(sensiCfe == null)) { // There is some sensitivity to that cfe.
sensitivity = sensitivity.plus(sensiCfe.multipliedBy(cfeAmountBar[loopcf])); } } if (!swaption.isLong()) { return MultipleCurrencyMulticurveSensitivity.of(ccy, sensitivity.multipliedBy(-1.0)); } return MultipleCurrencyMulticurveSensitivity.of(ccy, sensitivity); } }
package com.jsonde.profiler;

import com.jsonde.api.methodCall.MethodCallDto;
import com.jsonde.api.methodCall.MethodCallDtoFactory;
import com.jsonde.api.methodCall.MethodCallSummaryDto;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * Per-thread call-graph recorder. Instrumented code calls the enter/leave hooks; this class
 * maintains the current call stack ({@code methodCallsQueue}), a flat list of finished calls
 * that is flushed to the {@link Profiler} in batches, and a parallel summary tree
 * ({@code MethodCallSummaryDto}) with invocation counts and nano-timing.
 *
 * NOTE(review): not thread-safe by itself — presumably each thread owns its own instance
 * (the class name suggests thread-local usage); confirm against the owning Profiler.
 */
public class ThreadLocalProfiler {

    // Batch size: finished calls are shipped to the profiler every this-many completions.
    private int maxMethodCallGraphSize = 2000;

    private final Profiler profiler;

    public ThreadLocalProfiler(Profiler profiler) {
        this.profiler = profiler;
    }

    // Re-entrancy guard: true while we are inside profiler code, so instrumented calls made
    // by the profiler itself are not recorded (would recurse infinitely otherwise).
    private boolean isProfilerCode;

    // Current call-stack depth; 0 means the outermost instrumented call has returned.
    private int level = 0;
    // Finished-call counter since the last full flush; drives the batching modulo below.
    private int count = 0;

    // Stack of in-flight calls (addFirst/removeFirst => head is the innermost call).
    private LinkedList<MethodCallDto> methodCallsQueue = new LinkedList<MethodCallDto>();
    // Completed calls awaiting shipment to the profiler.
    private List<MethodCallDto> finishedMethodCalls = new ArrayList<MethodCallDto>(maxMethodCallGraphSize);

    // Summary node for the method currently on top of the stack.
    private MethodCallSummaryDto contextMethodCallSummary;

    // todo: find out why some method ids are missing?

    // Root of the summary tree for the current outermost call.
    private MethodCallSummaryDto rootContextMethodCallSummary;

    /**
     * Called before a constructor body runs; the object does not exist yet, so the
     * actual class is filled in later by {@link #enterConstructorImpl}.
     */
    protected void preEnterConstructorImpl(long methodId) {
        enterMethodImpl(methodId, null, null);
    }

    /**
     * Called once {@code this} is available inside a constructor: back-fills the actual
     * class id on the frame previously pushed by {@link #preEnterConstructorImpl}.
     */
    protected void enterConstructorImpl(long methodId, Object object, Object[] arguments) {
        if (isProfilerCode) return;
        try {
            isProfilerCode = true;
            MethodCallDto contextMethodCallDto = methodCallsQueue.element();
            // Only patch the frame if it really belongs to this constructor.
            if (methodId == contextMethodCallDto.methodId) {
                if (null != object) {
                    contextMethodCallDto.actualClassId = profiler.generateClassIdAndRegisterIfAbsent(object.getClass());
                    contextMethodCallDto.flags |= 1 << MethodCallDto.ACTUAL_CLASS_ID_SET_FLAG;
                }
            }
        } finally {
            isProfilerCode = false;
        }
    }

    /**
     * Records method entry: pushes a pooled MethodCallDto frame (wall-clock start stored in
     * executionTime) and descends one level in the summary tree (nano start in startTime).
     */
    protected void enterMethodImpl(long methodId, Object object, Object[] arguments) {
        if (isProfilerCode) return;
        try {
            isProfilerCode = true;
            level++;
            //MethodCallDto currentMethodCallDto = new MethodCallDto(profiler.generateMethodCallId(), methodId);
            // Pooled to avoid allocation on every instrumented call.
            MethodCallDto currentMethodCallDto = MethodCallDtoFactory.getMethodCallDtoFromPool();
            currentMethodCallDto.methodCallId = profiler.generateMethodCallId();
            currentMethodCallDto.methodId = methodId;
            // executionTime temporarily holds the start timestamp; leaveMethodImpl converts it to a duration.
            currentMethodCallDto.executionTime = System.currentTimeMillis();
            if (null != object) {
                Class clazz = object.getClass();
                long classId = profiler.generateClassIdAndRegisterIfAbsent(clazz);
                currentMethodCallDto.actualClassId = classId;
                currentMethodCallDto.flags |= 1 << MethodCallDto.ACTUAL_CLASS_ID_SET_FLAG;
            }
            // Link to the caller frame unless this is the outermost call.
            if (level > 1) {
                currentMethodCallDto.callerId = methodCallsQueue.element().methodCallId;
                currentMethodCallDto.flags |= 1 << MethodCallDto.CALLER_ID_SET_FLAG;
            }
            methodCallsQueue.addFirst(currentMethodCallDto);
            {
                // todo implement cpu profiling stuff
                MethodCallSummaryDto currentMethodCallSummary;
                if (null == contextMethodCallSummary) {
                    // Outermost call: start a fresh summary tree.
                    currentMethodCallSummary = new MethodCallSummaryDto();
                    rootContextMethodCallSummary = currentMethodCallSummary;
                } else {
                    // Nested call: reuse/create the callee node under the current context.
                    currentMethodCallSummary = contextMethodCallSummary.getCallee(methodId);
                }
                contextMethodCallSummary = currentMethodCallSummary;
                contextMethodCallSummary.methodId = methodId;
                contextMethodCallSummary.invocationCount++;
                contextMethodCallSummary.startTime = System.nanoTime();
            }
        } finally {
            isProfilerCode = false;
        }
    }

    /** Debug aid: prints the current depth, counter and summary nodes to stdout. */
    public void dump() {
        System.out.println(Thread.currentThread().getId() + " thread level is " + level);
        System.out.println(count);
        System.out.println(contextMethodCallSummary);
        System.out.println(rootContextMethodCallSummary);
    }

    /**
     * Records method exit: pops the frame, finalizes its duration/flags, accumulates the
     * summary timing, and ships finished calls to the profiler — fully when the outermost
     * call returns, or in partial batches every {@code maxMethodCallGraphSize} completions
     * (with the summary tree attached every 5th batch).
     */
    protected void leaveMethodImpl(boolean isVoid, boolean isThrowsException, Object result) {
        if (isProfilerCode) return;
        try {
            isProfilerCode = true;
            // debug(false);
            level--;
            count++;
            MethodCallDto contextMethodCallDto = methodCallsQueue.removeFirst();
            // Convert the stored start timestamp into an elapsed duration.
            contextMethodCallDto.executionTime = System.currentTimeMillis() - contextMethodCallDto.executionTime;
            if (!isVoid) contextMethodCallDto.flags |= 1 << MethodCallDto.RETURN_VALUE_FLAG;
            if (isThrowsException) contextMethodCallDto.flags |= 1 << MethodCallDto.THROW_EXCEPTION_FLAG;
            finishedMethodCalls.add(contextMethodCallDto);
            {
                // Begin cpu profiler stuff
                if (isThrowsException) contextMethodCallSummary.exceptionCount++;
                contextMethodCallSummary.executionTime += System.nanoTime() - contextMethodCallSummary.startTime;
            }
            if (0 == level) {
                // Method call graph building finished
                profiler.processMethodCall(finishedMethodCalls, rootContextMethodCallSummary, true);
                count = 0;
                finishedMethodCalls.clear();
                rootContextMethodCallSummary = null;
            } else {
                // Method call graph building in progress
                if (0 == count % maxMethodCallGraphSize) {
                    // Every 5th partial flush also carries the (still growing) summary tree.
                    if (0 == count % (maxMethodCallGraphSize * 5)) {
                        profiler.processMethodCall(finishedMethodCalls, rootContextMethodCallSummary, false);
                    } else {
                        profiler.processMethodCall(finishedMethodCalls, null, false);
                    }
                    finishedMethodCalls.clear();
                }
            }
            // Ascend one level in the summary tree.
            contextMethodCallSummary = contextMethodCallSummary.caller;
        } finally {
            isProfilerCode = false;
        }
    }

    public int getMaxMethodCallGraphSize() {
        return maxMethodCallGraphSize;
    }

    public void setMaxMethodCallGraphSize(int maxMethodCallGraphSize) {
        this.maxMethodCallGraphSize = maxMethodCallGraphSize;
        // todo resize finishedMethodCalls
    }

}
package org.maltparser.core.syntaxgraph.writer;

import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.SortedMap;
import java.util.regex.PatternSyntaxException;

import org.maltparser.core.exception.MaltChainedException;
import org.maltparser.core.io.dataformat.ColumnDescription;
import org.maltparser.core.io.dataformat.DataFormatException;
import org.maltparser.core.io.dataformat.DataFormatInstance;
import org.maltparser.core.symbol.SymbolTable;
import org.maltparser.core.symbol.SymbolTableHandler;
import org.maltparser.core.syntaxgraph.PhraseStructure;
import org.maltparser.core.syntaxgraph.TokenStructure;
import org.maltparser.core.syntaxgraph.node.NonTerminalNode;
import org.maltparser.core.syntaxgraph.node.PhraseStructureNode;
import org.maltparser.core.syntaxgraph.node.TokenNode;

/**
 * Writes a phrase-structure graph in Penn Treebank bracket notation, either on a single
 * line (DEFAULT) or indented one node per line (PRETTY, selected via option {@code -f p}).
 *
 * @author Johan Hall
 */
public class BracketWriter implements SyntaxGraphWriter {
	/** Output layout: DEFAULT = single line, PRETTY = indented, one node per line. */
	private enum PennWriterFormat {
		DEFAULT, PRETTY
	};

	private PennWriterFormat format;
	private BufferedWriter writer;
	private DataFormatInstance dataFormatInstance;
	private SortedMap<String, ColumnDescription> inputColumns;
	private SortedMap<String, ColumnDescription> edgeLabelColumns;
	private SortedMap<String, ColumnDescription> phraseLabelColumns;
	// Bracket-notation lexical constants.
	private static final char STARTING_BRACKET = '(';
	private static final String EMPTY_EDGELABEL = "??";
	private static final char CLOSING_BRACKET = ')';
	private static final char INPUT_SEPARATOR = ' ';
	private static final char EDGELABEL_SEPARATOR = '-';
	private static final char SENTENCE_SEPARATOR = '\n';
	private String optionString;
	// Do not close the underlying stream when it is System.out/System.err.
	private boolean closeStream = true;

	public BracketWriter() {
	}

	/**
	 * Opens the named file for writing with the given character encoding.
	 *
	 * @param fileName the output file path
	 * @param charsetName the character encoding name
	 * @throws MaltChainedException if the file cannot be created or the encoding is unsupported
	 */
	public void open(String fileName, String charsetName) throws MaltChainedException {
		try {
			open(new OutputStreamWriter(new FileOutputStream(fileName), charsetName));
		} catch (FileNotFoundException e) {
			throw new DataFormatException("The output file '"+fileName+"' cannot be found.", e);
		} catch (UnsupportedEncodingException e) {
			throw new DataFormatException("The character encoding set '"+charsetName+"' isn't supported.", e);
		}
	}

	/**
	 * Opens an arbitrary output stream; System.out/System.err are never closed by this writer.
	 *
	 * @param os the output stream
	 * @param charsetName the character encoding name
	 * @throws MaltChainedException if the encoding is unsupported
	 */
	public void open(OutputStream os, String charsetName) throws MaltChainedException {
		try {
			if (os == System.out || os == System.err) {
				closeStream = false;
			}
			open(new OutputStreamWriter(os, charsetName));
		} catch (UnsupportedEncodingException e) {
			throw new DataFormatException("The character encoding set '"+charsetName+"' isn't supported.", e);
		}
	}

	private void open(OutputStreamWriter osw) throws MaltChainedException {
		setWriter(new BufferedWriter(osw));
	}

	public void writeEpilog() throws MaltChainedException {
	}

	public void writeProlog() throws MaltChainedException {
	}

	/**
	 * Writes one sentence: the whole phrase-structure tree in bracket notation, followed by
	 * a sentence separator. Graphs without tokens (or non-phrase-structure graphs) are skipped.
	 *
	 * @param syntaxGraph the sentence graph to write
	 * @throws MaltChainedException if writing fails
	 */
	public void writeSentence(TokenStructure syntaxGraph) throws MaltChainedException {
		if (syntaxGraph == null || dataFormatInstance == null) {
			return;
		}
		if (syntaxGraph instanceof PhraseStructure && syntaxGraph.hasTokens()) {
			if (format == PennWriterFormat.PRETTY) {
				writeElement(syntaxGraph.getSymbolTables(), ((PhraseStructure) syntaxGraph).getPhraseStructureRoot(), 0);
			} else {
				writeElement(syntaxGraph.getSymbolTables(), ((PhraseStructure) syntaxGraph).getPhraseStructureRoot());
			}
			try {
				writer.write(SENTENCE_SEPARATOR);
				writer.flush();
			} catch (IOException e) {
				close();
				throw new DataFormatException("Could not write to the output file. ", e);
			}
		}
	}

	/**
	 * Recursively writes one node (and its subtree) in single-line format.
	 * Terminal nodes emit their input columns; non-terminals emit their phrase labels and
	 * recurse over their children. Edge labels are appended after the first column with
	 * {@code EDGELABEL_SEPARATOR}, except for root attachments and the empty label.
	 * NOTE(review): unlike the PRETTY variant below, this path does not run token labels
	 * through encodeString(); presumably intentional for round-tripping — confirm.
	 */
	private void writeElement(SymbolTableHandler symbolTables, PhraseStructureNode element) throws MaltChainedException {
		try {
			if (element instanceof TokenNode) {
				PhraseStructureNode t = (PhraseStructureNode) element;
				SymbolTable table = null;
				writer.write(STARTING_BRACKET);
				int i = 0;
				for (String inputColumn : inputColumns.keySet()) {
					if (i != 0) {
						writer.write(INPUT_SEPARATOR);
					}
					table = symbolTables.getSymbolTable(inputColumns.get(inputColumn).getName());
					if (t.hasLabel(table)) {
						writer.write(t.getLabelSymbol(table));
					}
					if (i == 0) {
						for (String edgeLabelColumn : edgeLabelColumns.keySet()) {
							table = symbolTables.getSymbolTable(edgeLabelColumns.get(edgeLabelColumn).getName());
							if (t.hasParentEdgeLabel(table) && !t.getParent().isRoot() && !t.getParentEdgeLabelSymbol(table).equals(EMPTY_EDGELABEL)) {
								writer.write(EDGELABEL_SEPARATOR);
								writer.write(t.getParentEdgeLabelSymbol(table));
							}
						}
					}
					i++;
				}
				writer.write(CLOSING_BRACKET);
			} else {
				NonTerminalNode nt = (NonTerminalNode) element;
				writer.write(STARTING_BRACKET);
				SymbolTable table = null;
				int i = 0;
				for (String phraseLabelColumn : phraseLabelColumns.keySet()) {
					if (i != 0) {
						writer.write(INPUT_SEPARATOR);
					}
					table = symbolTables.getSymbolTable(phraseLabelColumns.get(phraseLabelColumn).getName());
					if (nt.hasLabel(table)) {
						writer.write(nt.getLabelSymbol(table));
					}
					if (i == 0) {
						for (String edgeLabelColumn : edgeLabelColumns.keySet()) {
							table = symbolTables.getSymbolTable(edgeLabelColumns.get(edgeLabelColumn).getName());
							if (nt.hasParentEdgeLabel(table) && !nt.getParent().isRoot() && !nt.getParentEdgeLabelSymbol(table).equals(EMPTY_EDGELABEL)) {
								writer.write(EDGELABEL_SEPARATOR);
								writer.write(nt.getParentEdgeLabelSymbol(table));
							}
						}
					}
					i++;
				}
				for (PhraseStructureNode node : ((NonTerminalNode) element).getChildren()) {
					writeElement(symbolTables, node);
				}
				writer.write(CLOSING_BRACKET);
			}
		} catch (IOException e) {
			throw new DataFormatException("Could not write to the output file. ", e);
		}
	}

	/** Returns {@code depth} tab characters for PRETTY indentation. */
	private String getIndentation(int depth) {
		StringBuilder sb = new StringBuilder("");
		for (int i = 0; i < depth; i++) {
			sb.append("\t");
		}
		return sb.toString();
	}

	/**
	 * Recursively writes one node (and its subtree) in PRETTY format: each node starts on a
	 * new line indented by its depth; token labels are escaped via {@link #encodeString(String)}.
	 */
	private void writeElement(SymbolTableHandler symbolTables, PhraseStructureNode element, int depth) throws MaltChainedException {
		try {
			if (element instanceof TokenNode) {
				PhraseStructureNode t = (PhraseStructureNode) element;
				SymbolTable table = null;
				writer.write("\n" + getIndentation(depth) + STARTING_BRACKET);
				int i = 0;
				for (String inputColumn : inputColumns.keySet()) {
					if (i != 0) {
						writer.write(INPUT_SEPARATOR);
					}
					table = symbolTables.getSymbolTable(inputColumns.get(inputColumn).getName());
					if (t.hasLabel(table)) {
						writer.write(encodeString(t.getLabelSymbol(table)));
					}
					if (i == 0) {
						for (String edgeLabelColumn : edgeLabelColumns.keySet()) {
							table = symbolTables.getSymbolTable(edgeLabelColumns.get(edgeLabelColumn).getName());
							if (t.hasParentEdgeLabel(table) && !t.getParent().isRoot() && !t.getParentEdgeLabelSymbol(table).equals(EMPTY_EDGELABEL)) {
								writer.write(EDGELABEL_SEPARATOR);
								writer.write(t.getParentEdgeLabelSymbol(table));
							}
						}
					}
					i++;
				}
				writer.write(CLOSING_BRACKET);
			} else {
				NonTerminalNode nt = (NonTerminalNode) element;
				writer.write("\n" + getIndentation(depth) + STARTING_BRACKET);
				SymbolTable table = null;
				int i = 0;
				for (String phraseLabelColumn : phraseLabelColumns.keySet()) {
					if (i != 0) {
						writer.write(INPUT_SEPARATOR);
					}
					table = symbolTables.getSymbolTable(phraseLabelColumns.get(phraseLabelColumn).getName());
					if (nt.hasLabel(table)) {
						writer.write(nt.getLabelSymbol(table));
					}
					if (i == 0) {
						for (String edgeLabelColumn : edgeLabelColumns.keySet()) {
							table = symbolTables.getSymbolTable(edgeLabelColumns.get(edgeLabelColumn).getName());
							if (nt.hasParentEdgeLabel(table) && !nt.getParent().isRoot() && !nt.getParentEdgeLabelSymbol(table).equals(EMPTY_EDGELABEL)) {
								writer.write(EDGELABEL_SEPARATOR);
								writer.write(nt.getParentEdgeLabelSymbol(table));
							}
						}
					}
					i++;
				}
				for (PhraseStructureNode node : ((NonTerminalNode) element).getChildren()) {
					writeElement(symbolTables, node, depth + 1);
				}
				writer.write("\n" + getIndentation(depth) + CLOSING_BRACKET);
			}
		} catch (IOException e) {
			throw new DataFormatException("Could not write to the output file. ", e);
		}
	}

	public BufferedWriter getWriter() {
		return writer;
	}

	/** Replaces the current writer, closing the previous one first. */
	public void setWriter(BufferedWriter writer) throws MaltChainedException {
		close();
		this.writer = writer;
	}

	public DataFormatInstance getDataFormatInstance() {
		return dataFormatInstance;
	}

	/** Installs the data format and caches its column groups used during writing. */
	public void setDataFormatInstance(DataFormatInstance dataFormatInstance) {
		this.dataFormatInstance = dataFormatInstance;
		inputColumns = dataFormatInstance.getInputColumnDescriptions();
		edgeLabelColumns = dataFormatInstance.getPhraseStructureEdgeLabelColumnDescriptions();
		phraseLabelColumns = dataFormatInstance.getPhraseStructureNodeLabelColumnDescriptions();
	}

	public String getOptions() {
		return optionString;
	}

	/**
	 * Parses the writer options. Options are flag/value pairs split on '_' or blanks;
	 * the only recognized flag is {@code -f} with value {@code p} (PRETTY) or {@code d} (DEFAULT).
	 *
	 * @param optionString the raw option string
	 * @throws MaltChainedException on malformed or unknown options
	 */
	public void setOptions(String optionString) throws MaltChainedException {
		this.optionString = optionString;
		format = PennWriterFormat.DEFAULT;
		String[] argv;
		try {
			argv = optionString.split("[_\\p{Blank}]");
		} catch (PatternSyntaxException e) {
			throw new DataFormatException("Could not split the bracket writer option '"+optionString+"'. ", e);
		}
		for (int i = 0; i < argv.length - 1; i++) {
			if (argv[i].charAt(0) != '-') {
				throw new DataFormatException("The argument flag should start with the following character '-', not with "+argv[i].charAt(0));
			}
			if (++i >= argv.length) {
				throw new DataFormatException("The last argument does not have any value. ");
			}
			switch (argv[i-1].charAt(1)) {
			case 'f':
				if (argv[i].equals("p")) {
					format = PennWriterFormat.PRETTY;
				// BUGFIX: was a duplicated equals("p") check, making the DEFAULT branch unreachable;
				// "d" is the explicit single-line-format value.
				} else if (argv[i].equals("d")) {
					format = PennWriterFormat.DEFAULT;
				}
				break;
			default:
				throw new DataFormatException("Unknown bracket writer option: '"+argv[i-1]+"' with value '"+argv[i]+"'. ");
			}
		}
	}

	/** Flushes and (unless writing to System.out/System.err) closes the underlying writer. */
	public void close() throws MaltChainedException {
		try {
			if (writer != null) {
				writer.flush();
				if (closeStream) {
					writer.close();
				}
				writer = null;
			}
		} catch (IOException e) {
			throw new DataFormatException("Could not close the output file. ", e);
		}
	}

	/** Escapes Penn Treebank reserved bracket characters in a token label. */
	private String encodeString(String string) {
		return string.replace("(", "-LRB-").replace(")", "-RRB-").replace("[", "-LSB-").replace("]", "-RSB-").replace("{", "-LCB-").replace("}", "-RCB-");
	}
}
package com.adjust.sdk;

import android.content.ContentResolver;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.util.DisplayMetrics;

import java.util.Locale;
import java.util.Map;

import static com.adjust.sdk.Constants.HIGH;
import static com.adjust.sdk.Constants.LARGE;
import static com.adjust.sdk.Constants.LONG;
import static com.adjust.sdk.Constants.LOW;
import static com.adjust.sdk.Constants.MEDIUM;
import static com.adjust.sdk.Constants.NORMAL;
import static com.adjust.sdk.Constants.SMALL;
import static com.adjust.sdk.Constants.XLARGE;

/**
 * Immutable-after-construction snapshot of device/app metadata gathered once at SDK start:
 * identifiers (MAC hashes, Android id, Facebook attribution id), app info (package, version),
 * hardware/OS info, and screen characteristics.
 *
 * Created by pfms on 06/11/14.
 */
class DeviceInfo {
    String macSha1;
    String macShortMd5;
    String androidId;
    String fbAttributionId;
    String clientSdk;
    String packageName;
    String appVersion;
    String deviceType;
    String deviceName;
    String deviceManufacturer;
    String osName;
    String osVersion;
    String language;
    String country;
    String screenSize;
    String screenFormat;
    String screenDensity;
    String displayWidth;
    String displayHeight;
    Map<String, String> pluginKeys;

    DeviceInfo(Context context, String sdkPrefix) {
        Resources res = context.getResources();
        DisplayMetrics metrics = res.getDisplayMetrics();
        Configuration config = res.getConfiguration();
        Locale deviceLocale = config.locale;
        int layout = config.screenLayout;
        // When Play Services is present the MAC/Android-id fallbacks are deliberately skipped.
        boolean playServicesAvailable = Reflection.getPlayAdId(context) != null;
        String mac = getMacAddress(context, playServicesAvailable);

        packageName = getPackageName(context);
        appVersion = getAppVersion(context);
        deviceType = getDeviceType(layout);
        deviceName = getDeviceName();
        deviceManufacturer = getDeviceManufacturer();
        osName = getOsName();
        osVersion = getOsVersion();
        language = getLanguage(deviceLocale);
        country = getCountry(deviceLocale);
        screenSize = getScreenSize(layout);
        screenFormat = getScreenFormat(layout);
        screenDensity = getScreenDensity(metrics);
        displayWidth = getDisplayWidth(metrics);
        displayHeight = getDisplayHeight(metrics);
        clientSdk = getClientSdk(sdkPrefix);
        androidId = getAndroidId(context, playServicesAvailable);
        fbAttributionId = getFacebookAttributionId(context);
        pluginKeys = Reflection.getPluginKeys(context);
        macSha1 = getMacSha1(mac);
        macShortMd5 = getMacShortMd5(mac);
    }

    // MAC address is only read as a fallback when Play Services is unavailable;
    // a missing ACCESS_WIFI_STATE permission is logged but not fatal.
    private String getMacAddress(Context context, boolean isGooglePlayServicesAvailable) {
        if (isGooglePlayServicesAvailable) {
            return null;
        }
        if (!Util.checkPermission(context, android.Manifest.permission.ACCESS_WIFI_STATE)) {
            AdjustFactory.getLogger().warn("Missing permission: ACCESS_WIFI_STATE");
        }
        return Reflection.getMacAddress(context);
    }

    private String getPackageName(Context context) {
        return context.getPackageName();
    }

    // versionName of the host app, or null when the package cannot be resolved.
    private String getAppVersion(Context context) {
        try {
            PackageManager pm = context.getPackageManager();
            PackageInfo info = pm.getPackageInfo(context.getPackageName(), 0);
            return info.versionName;
        } catch (PackageManager.NameNotFoundException e) {
            return null;
        }
    }

    // Coarse device class derived from the screen-size bits of screenLayout.
    private String getDeviceType(int screenLayout) {
        int sizeBits = screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK;
        if (sizeBits == Configuration.SCREENLAYOUT_SIZE_SMALL
                || sizeBits == Configuration.SCREENLAYOUT_SIZE_NORMAL) {
            return "phone";
        }
        // 4 == SCREENLAYOUT_SIZE_XLARGE (literal kept for old compile targets).
        if (sizeBits == Configuration.SCREENLAYOUT_SIZE_LARGE || sizeBits == 4) {
            return "tablet";
        }
        return null;
    }

    private String getDeviceName() {
        return Build.MODEL;
    }

    private String getDeviceManufacturer() {
        return Build.MANUFACTURER;
    }

    private String getOsName() {
        return "android";
    }

    // Also stores the value in the field, matching the original's assignment-in-return.
    private String getOsVersion() {
        osVersion = "" + Build.VERSION.SDK_INT;
        return osVersion;
    }

    private String getLanguage(Locale locale) {
        return locale.getLanguage();
    }

    private String getCountry(Locale locale) {
        return locale.getCountry();
    }

    // Screen-size bucket name, or null for unknown values.
    private String getScreenSize(int screenLayout) {
        int sizeBits = screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK;
        if (sizeBits == Configuration.SCREENLAYOUT_SIZE_SMALL) {
            return SMALL;
        }
        if (sizeBits == Configuration.SCREENLAYOUT_SIZE_NORMAL) {
            return NORMAL;
        }
        if (sizeBits == Configuration.SCREENLAYOUT_SIZE_LARGE) {
            return LARGE;
        }
        // 4 == SCREENLAYOUT_SIZE_XLARGE (literal kept for old compile targets).
        if (sizeBits == 4) {
            return XLARGE;
        }
        return null;
    }

    // Aspect-ratio bucket from the "long" bits of screenLayout.
    private String getScreenFormat(int screenLayout) {
        int longBits = screenLayout & Configuration.SCREENLAYOUT_LONG_MASK;
        if (longBits == Configuration.SCREENLAYOUT_LONG_YES) {
            return LONG;
        }
        if (longBits == Configuration.SCREENLAYOUT_LONG_NO) {
            return NORMAL;
        }
        return null;
    }

    // Buckets densityDpi into low/medium/high using midpoints between the standard densities.
    private String getScreenDensity(DisplayMetrics displayMetrics) {
        int dpi = displayMetrics.densityDpi;
        if (dpi == 0) {
            return null;
        }
        int lowCutoff = (DisplayMetrics.DENSITY_MEDIUM + DisplayMetrics.DENSITY_LOW) / 2;
        int highCutoff = (DisplayMetrics.DENSITY_MEDIUM + DisplayMetrics.DENSITY_HIGH) / 2;
        if (dpi < lowCutoff) {
            return LOW;
        }
        if (dpi > highCutoff) {
            return HIGH;
        }
        return MEDIUM;
    }

    private String getDisplayWidth(DisplayMetrics displayMetrics) {
        return String.valueOf(displayMetrics.widthPixels);
    }

    private String getDisplayHeight(DisplayMetrics displayMetrics) {
        return String.valueOf(displayMetrics.heightPixels);
    }

    // "prefix@sdk" when a wrapper-SDK prefix is set, otherwise the plain SDK version string.
    private String getClientSdk(String sdkPrefix) {
        if (sdkPrefix == null) {
            return Constants.CLIENT_SDK;
        }
        return String.format(Locale.US, "%s@%s", sdkPrefix, Constants.CLIENT_SDK);
    }

    private String getMacSha1(String macAddress) {
        if (macAddress == null) {
            return null;
        }
        return Util.sha1(macAddress);
    }

    // MD5 of the MAC with the colon separators stripped.
    private String getMacShortMd5(String macAddress) {
        if (macAddress == null) {
            return null;
        }
        return Util.md5(macAddress.replaceAll(":", ""));
    }

    // Android id is only used as a fallback when Play Services is unavailable.
    private String getAndroidId(Context context, boolean isGooglePlayServicesAvailable) {
        if (isGooglePlayServicesAvailable) {
            return null;
        }
        return Reflection.getAndroidId(context);
    }

    // Queries the Facebook app's attribution-id content provider; any failure yields null.
    private String getFacebookAttributionId(final Context context) {
        try {
            final ContentResolver contentResolver = context.getContentResolver();
            final Uri uri = Uri.parse("content://com.facebook.katana.provider.AttributionIdProvider");
            final String columnName = "aid";
            final String[] projection = {columnName};
            final Cursor cursor = contentResolver.query(uri, projection, null, null, null);
            if (cursor == null) {
                return null;
            }
            if (!cursor.moveToFirst()) {
                cursor.close();
                return null;
            }
            final String attributionId = cursor.getString(cursor.getColumnIndex(columnName));
            cursor.close();
            return attributionId;
        } catch (Exception e) {
            return null;
        }
    }
}
package com.veil.ai;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.math.Vector2;
import com.veil.game.GameConstant;
import com.veil.game.element.DynamicEntity;

/**
 * Records, per projectile speed, the distance between the player and an enemy's first attack
 * at the moment the player reacts (jumps), together with whether the player was hit.
 * Profiles are serialized to disk and later aggregated into an average "relevant range".
 */
public class RangeProfile {

	/**
	 * Distances logged for one projectile speed. Each entry pairs a distance with a
	 * "miss" flag (true = player was damaged, so the sample is excluded from averages).
	 * NOTE(review): this is a non-static inner class implementing Serializable; the custom
	 * writeObject/readObject only serialize the primitive fields, so the outer reference is
	 * never written — keep it that way if this class is ever refactored.
	 */
	public class DistanceLog implements Serializable {
		private static final long serialVersionUID = 8048529594171440656L;
		private int speed;
		private List<Integer> list;
		private List<Boolean> miss;

		public DistanceLog(int speed) {
			this.speed = speed;
			list = new ArrayList<Integer>();
			miss = new ArrayList<Boolean>();
		}

		/** Appends one sample: the observed distance and whether the player took damage. */
		public void add(int distance, boolean playerDamaged) {
			list.add(distance);
			miss.add(playerDamaged);
		}

		/** Sum of distances over valid (non-damaged) samples only. */
		public int sum() {
			int sum = 0;
			for (int i = 0; i < list.size(); i++) {
				if (!miss.get(i)) sum += list.get(i);
			}
			return sum;
		}

		/** Number of valid (non-damaged) samples. */
		public int sampleSize() {
			int validSize = 0;
			for (int i = 0; i < list.size(); i++) {
				if (!miss.get(i)) validSize++;
			}
			return validSize;
		}

		/** Average distance over valid samples; 0 when there are no valid samples. */
		public float average() {
			if (list.size() == 0) return 0;
			int size = sampleSize();
			if (size == 0) return 0;
			return 1f * sum() / size;
		}

		@Override
		public String toString() {
			StringBuilder strb = new StringBuilder();
			strb.append(speed).append(" :");
			for (int i = 0; i < list.size(); i++) {
				strb.append(" ");
				if (miss.get(i)) {
					strb.append("-"); // damaged samples are prefixed with '-'
				}
				strb.append(list.get(i));
			}
			return strb.toString();
		}

		// Custom compact serialization: speed, then the two lists as length-prefixed primitives.
		private void writeObject(java.io.ObjectOutputStream out) throws IOException {
			out.writeInt(speed);
			out.writeInt(list.size());
			for (int data : list) {
				out.writeInt(data);
			}
			out.writeInt(miss.size());
			for (boolean data : miss) {
				out.writeBoolean(data);
			}
		}

		private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
			speed = in.readInt();
			int listSize = in.readInt();
			list = new ArrayList<Integer>(listSize);
			for (int i = 0; i < listSize; i++) {
				list.add(in.readInt());
			}
			listSize = in.readInt();
			miss = new ArrayList<Boolean>(listSize);
			for (int i = 0; i < listSize; i++) {
				miss.add(in.readBoolean());
			}
		}

		@SuppressWarnings("unused")
		private void readObjectNoData() throws ObjectStreamException {
			this.speed = -1;
			list = new LinkedList<Integer>();
			// BUGFIX: miss was left null here, which would NPE in sum()/sampleSize()/toString().
			miss = new LinkedList<Boolean>();
		}
	}

	public static RangeProfile instance = new RangeProfile(GameConstant.profileDir.child("Screenshot_range"));

	// Keyed by projectile speed (parsed from the enemy identifier suffix).
	private HashMap<Integer, DistanceLog> logs = new HashMap<Integer, DistanceLog>();

	private boolean sessionLogged = false;
	private int pendingSpeed, pendingDistance;
	private boolean playerDamaged, bulletPassPlayer;
	private FileHandle ssDir;
	private int ssFilename;

	public RangeProfile() {
		this(null);
	}

	public RangeProfile(FileHandle fh) {
		reset(null);
		ssDir = fh;
		if (ssDir != null) {
			// Continue screenshot numbering from the existing file count.
			ssFilename = ssDir.list().length;
		}
	}

	/**
	 * Save previous profile (if available) to target directory and reset all session state.
	 */
	public void reset(FileHandle fh) {
		if (fh != null && logs.size() > 0) {
			save(fh.child("range_profile.txt"));
		}
		pendingSpeed = -1;
		pendingDistance = -1;
		playerDamaged = false;
		bulletPassPlayer = false;
		sessionLogged = false;
		logs.clear();
	}

	/**
	 * Per-frame observation before the game update: when the player first reacts (jumps),
	 * captures the projectile speed (from the enemy identifier suffix) and the distance to
	 * the first attack, optionally takes a screenshot, and tracks damage / bullet-passed flags.
	 */
	public void preUpdate(LevelSnapshot snapshot) {
		if (snapshot.playerState.jumping && !sessionLogged) {
			String iden = snapshot.enemy.identifier;
			// Speed is encoded as the suffix after the last '_' in the enemy identifier.
			pendingSpeed = Integer.parseInt(iden.substring(iden.lastIndexOf("_") + 1));
			if (snapshot.tempRect.size() > 0) {
				Vector2 firstAttackPos = new Vector2();
				snapshot.tempRect.values().iterator().next().getCenter(firstAttackPos);
				Vector2 playerPos = new Vector2();
				snapshot.playerRect.getCenter(playerPos);
				pendingDistance = (int) firstAttackPos.dst(playerPos);
			}
			if (ssDir != null) {
				ScreenshotUtility.takeScreenshot(ssDir, "" + ssFilename);
				ssFilename++;
			}
			sessionLogged = true;
		}
		if (snapshot.player.getBaseHP() < snapshot.player.maxhp) {
			playerDamaged = true;
		}
		// A bullet well past the player (>= 64 units behind) means the player dodged it.
		for (DynamicEntity dyn : snapshot.tempRect.keySet()) {
			if (snapshot.player.getWorldCollider().x - dyn.getWorldCollider().x >= 64) {
				bulletPassPlayer = true;
			}
		}
	}

	/** A session ends once the player is hit or a bullet has passed the player. */
	public boolean shouldEndSession() {
		return playerDamaged || bulletPassPlayer;
	}

	/**
	 * Call when current range profiling session should end. Return true if the player is damaged during the session
	 * OR the session ends prematurely (end before enemy's first shoot)
	 */
	public boolean onSessionEnd() {
		boolean isPlayerDamaged = playerDamaged;
		boolean isBulletPassPlayer = bulletPassPlayer;
		//Check condition, prevent logging when session ends prematurely
		if (sessionLogged && pendingSpeed > -1 && (isPlayerDamaged || isBulletPassPlayer)) {
			if (!logs.containsKey(pendingSpeed)) {
				logs.put(pendingSpeed, new DistanceLog(pendingSpeed));
			}
			logs.get(pendingSpeed).add(pendingDistance, isPlayerDamaged);
			//System.out.println(logs.get(pendingSpeed));
		}
		pendingSpeed = -1;
		pendingDistance = -1;
		playerDamaged = false;
		bulletPassPlayer = false;
		sessionLogged = false;
		return isPlayerDamaged || !isBulletPassPlayer;
	}

	/** Debug aid: prints every speed's distance log to stdout. */
	public void print() {
		for (DistanceLog log : logs.values()) {
			System.out.println(log);
		}
		System.out.println(">>>>>>>>>>>>>>>>>>>>>>");
	}

	/**
	 * Serializes the profile (count followed by each DistanceLog) and writes it to the file.
	 */
	public void save(FileHandle fh) {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		ObjectOutput out = null;
		try {
			out = new ObjectOutputStream(bos);
			out.writeInt(logs.size());
			for (DistanceLog log : logs.values()) {
				out.writeObject(log);
			}
			// BUGFIX: ObjectOutputStream buffers block data internally; without a flush,
			// bos.toByteArray() can miss the tail of the stream and produce a truncated file.
			out.flush();
			byte[] bytes = bos.toByteArray();
			fh.writeBytes(bytes, false);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Calculate relevant range from all range profiles in specified directory
	 * (average valid distance over every profile found, recursively).
	 */
	public static int calculateRelevantRange(FileHandle dir) {
		float[] out = new float[1];
		int sampleSize = calculateTotalDistance(dir, out);
		if (sampleSize == 0) {
			// No valid samples: previously 0f/0 -> NaN -> (int) NaN == 0; make that explicit.
			return 0;
		}
		out[0] /= sampleSize;
		return (int) out[0];
	}

	// Recursively accumulates total valid distance into out[0]; returns total valid sample count.
	private static int calculateTotalDistance(FileHandle dir, float[] out) {
		int sampleSize = 0;
		for (FileHandle fh : dir.list()) {
			if (!fh.isDirectory()) {
				RangeProfile profile = new RangeProfile();
				profile.load(fh);
				//profile.print();
				int totalDistance = 0;
				int counter = 0;
				for (DistanceLog log : profile.logs.values()) {
					totalDistance += log.sum();
					counter += log.sampleSize();
				}
				out[0] += totalDistance;
				sampleSize += counter;
			} else {
				sampleSize += calculateTotalDistance(fh, out);
			}
		}
		return sampleSize;
	}

	/**
	 * Loads a previously saved profile from the file, replacing the current logs.
	 * Any failure leaves the (cleared) logs as-is and reports the file name to stderr.
	 */
	public void load(FileHandle fh) {
		byte[] data = fh.readBytes();
		ByteArrayInputStream bis = new ByteArrayInputStream(data);
		ObjectInput in = null;
		try {
			in = new ObjectInputStream(bis);
			int remaining = in.readInt();
			logs.clear();
			while (remaining > 0) {
				// BUGFIX: entries were keyed by the loop counter ('remaining') instead of the
				// log's speed, so loaded maps did not match what save()/onSessionEnd() produced.
				DistanceLog log = (DistanceLog) in.readObject();
				logs.put(log.speed, log);
				remaining--;
			}
		} catch (Exception e) {
			System.err.println("Error loading range profile " + fh.nameWithoutExtension());
		}
	}
}
/*
 * Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.querydsl.jpa.sql;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.FlushModeType;
import javax.persistence.LockModeType;
import javax.persistence.Query;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.mysema.commons.lang.CloseableIterator;
import com.querydsl.core.*;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.FactoryExpression;
import com.querydsl.jpa.AbstractSQLQuery;
import com.querydsl.jpa.NativeSQLSerializer;
import com.querydsl.jpa.QueryHandler;
import com.querydsl.jpa.impl.JPAProvider;
import com.querydsl.jpa.impl.JPAUtil;
import com.querydsl.sql.Configuration;
import com.querydsl.sql.SQLSerializer;

/**
 * {@code AbstractJPASQLQuery} is the base class for JPA Native SQL queries.
 *
 * <p>It serializes the query metadata to a native SQL string, builds a JPA
 * {@link Query} from it via the {@link EntityManager}, and — when the JPA
 * provider's {@link QueryHandler} cannot transform results itself — applies
 * the {@link FactoryExpression} projection to the raw result rows.</p>
 *
 * @author tiwe
 *
 * @param <T> result type
 * @param <Q> concrete subtype, for fluent-API covariant returns
 */
public abstract class AbstractJPASQLQuery<T, Q extends AbstractJPASQLQuery<T, Q>> extends AbstractSQLQuery<T, Q> {

    private static final Logger logger = LoggerFactory.getLogger(AbstractJPASQLQuery.class);

    // EntityManager used to create the native queries; supplied at construction.
    private final EntityManager entityManager;

    // JPA provider hints, applied to every created Query (multimap: a hint name
    // may be set several times).
    protected final Multimap<String, Object> hints = HashMultimap.create();

    // Provider-specific strategy for typed native queries, scalar/entity
    // registration and result transformation.
    protected final QueryHandler queryHandler;

    @Nullable
    protected LockModeType lockMode;

    @Nullable
    protected FlushModeType flushMode;

    // Projection to apply manually in getResultList/getSingleResult when the
    // QueryHandler could not install a result transformer; set (or cleared) by
    // createQuery(boolean).
    @Nullable
    protected FactoryExpression<?> projection;

    public AbstractJPASQLQuery(EntityManager em, Configuration configuration) {
        this(em, configuration, new DefaultQueryMetadata());
    }

    public AbstractJPASQLQuery(EntityManager em, Configuration configuration, QueryHandler queryHandler) {
        this(em, configuration, queryHandler, new DefaultQueryMetadata());
    }

    public AbstractJPASQLQuery(EntityManager em, Configuration configuration, QueryMetadata metadata) {
        // Derive the QueryHandler from the provider behind the EntityManager.
        this(em, configuration, JPAProvider.getTemplates(em).getQueryHandler(), metadata);
    }

    public AbstractJPASQLQuery(EntityManager em, Configuration configuration, QueryHandler queryHandler, QueryMetadata metadata) {
        super(metadata, configuration);
        this.entityManager = em;
        this.queryHandler = queryHandler;
    }

    /**
     * Creates the JPA native query for the current metadata (row query, not
     * the count query).
     *
     * @return the JPA query, fully configured (hints, lock/flush mode, constants)
     */
    public Query createQuery() {
        return createQuery(false);
    }

    /**
     * Serializes the metadata to SQL and builds a configured JPA native query.
     *
     * <p>NOTE: the local {@code projection} deliberately shadows the field of
     * the same name; the field is reset to null and only re-populated at the
     * end when the QueryHandler cannot transform results itself.</p>
     *
     * @param forCount true to build the count query (skips projection wiring)
     */
    private Query createQuery(boolean forCount) {
        NativeSQLSerializer serializer = (NativeSQLSerializer) serialize(forCount);
        String queryString = serializer.toString();
        logQuery(queryString, serializer.getConstantToLabel());
        Expression<?> projection = queryMixin.getMetadata().getProjection();
        Query query;

        // A plain entity projection may use the provider's typed native query
        // support; everything else goes through the untyped variant.
        if (!FactoryExpression.class.isAssignableFrom(projection.getClass()) && isEntityExpression(projection)) {
            if (queryHandler.createNativeQueryTyped()) {
                query = entityManager.createNativeQuery(queryString, projection.getType());
            } else {
                query = entityManager.createNativeQuery(queryString);
            }
        } else {
            query = entityManager.createNativeQuery(queryString);
        }
        if (!forCount) {
            ListMultimap<Expression<?>, String> aliases = serializer.getAliases();
            Set<String> used = Sets.newHashSet();
            if (projection instanceof FactoryExpression) {
                // Register each argument of the factory expression: entities as
                // entity results, scalars by their first unused alias.
                for (Expression<?> expr : ((FactoryExpression<?>) projection).getArgs()) {
                    if (isEntityExpression(expr)) {
                        queryHandler.addEntity(query, extractEntityExpression(expr).toString(), expr.getType());
                    } else if (aliases.containsKey(expr)) {
                        for (String scalar : aliases.get(expr)) {
                            if (!used.contains(scalar)) {
                                queryHandler.addScalar(query, scalar, expr.getType());
                                used.add(scalar);
                                break;
                            }
                        }
                    }
                }
            } else if (isEntityExpression(projection)) {
                queryHandler.addEntity(query, extractEntityExpression(projection).toString(), projection.getType());
            } else if (aliases.containsKey(projection)) {
                for (String scalar : aliases.get(projection)) {
                    if (!used.contains(scalar)) {
                        queryHandler.addScalar(query, scalar, projection.getType());
                        used.add(scalar);
                        break;
                    }
                }
            }
        }

        if (lockMode != null) {
            query.setLockMode(lockMode);
        }
        if (flushMode != null) {
            query.setFlushMode(flushMode);
        }

        for (Map.Entry<String, Object> entry : hints.entries()) {
            query.setHint(entry.getKey(), entry.getValue());
        }

        // set constants
        JPAUtil.setConstants(query, serializer.getConstantToLabel(), queryMixin.getMetadata().getParams());
        this.projection = null; // necessary when query is reused

        if (projection instanceof FactoryExpression) {
            // Keep the projection on the field only if the handler could NOT
            // install a transformer; getResultList/getSingleResult then apply it.
            if (!queryHandler.transform(query, (FactoryExpression<?>) projection)) {
                this.projection = (FactoryExpression<?>) projection;
            }
        }

        return query;
    }

    @Override
    protected SQLSerializer createSerializer() {
        return new NativeSQLSerializer(configuration, queryHandler.wrapEntityProjections());
    }

    /**
     * Transforms results using FactoryExpression if ResultTransformer can't be used.
     *
     * @param query query
     * @return results, with the pending {@link #projection} applied row by row
     *         when one is set
     */
    private List<?> getResultList(Query query) {
        // TODO : use lazy fetch here?
        if (projection != null) {
            List<?> results = query.getResultList();
            List<Object> rv = new ArrayList<Object>(results.size());
            for (Object o : results) {
                if (o != null) {
                    Object[] arr;
                    // Single-column rows come back unwrapped; normalize to Object[].
                    if (!o.getClass().isArray()) {
                        arr = new Object[]{o};
                    } else {
                        arr = (Object[]) o;
                    }
                    // Some providers append extra columns; trim to the arity the
                    // projection expects.
                    if (projection.getArgs().size() < arr.length) {
                        Object[] shortened = new Object[projection.getArgs().size()];
                        System.arraycopy(arr, 0, shortened, 0, shortened.length);
                        arr = shortened;
                    }
                    rv.add(projection.newInstance(arr));
                } else {
                    rv.add(null);
                }
            }
            return rv;
        } else {
            return query.getResultList();
        }
    }

    /**
     * Transforms results using FactoryExpression if ResultTransformer can't be used.
     *
     * @param query query
     * @return single result, with the pending {@link #projection} applied when set
     */
    @Nullable
    private Object getSingleResult(Query query) {
        if (projection != null) {
            Object result = query.getSingleResult();
            if (result != null) {
                if (!result.getClass().isArray()) {
                    result = new Object[]{result};
                }
                return projection.newInstance((Object[]) result);
            } else {
                return null;
            }
        } else {
            return query.getSingleResult();
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<T> fetch() {
        try {
            Query query = createQuery();
            return (List<T>) getResultList(query);
        } finally {
            // Always clear metadata/MDC state, even on provider exceptions.
            reset();
        }
    }

    @Override
    public CloseableIterator<T> iterate() {
        try {
            Query query = createQuery();
            return queryHandler.iterate(query, null);
        } finally {
            reset();
        }
    }

    @Override
    public QueryResults<T> fetchResults() {
        // TODO : handle entity projections as well
        try {
            // Run the count query first; only fetch rows if anything matches.
            Query query = createQuery(true);
            long total = ((Number) query.getSingleResult()).longValue();
            if (total > 0) {
                QueryModifiers modifiers = queryMixin.getMetadata().getModifiers();
                query = createQuery(false);
                @SuppressWarnings("unchecked")
                List<T> list = (List<T>) getResultList(query);
                return new QueryResults<T>(list, modifiers, total);
            } else {
                return QueryResults.emptyResults();
            }
        } finally {
            reset();
        }
    }

    /**
     * Records the query and its parameters in the logging MDC and debug-logs
     * the (newline-flattened) SQL. Cleared again by {@link #cleanupMDC()}.
     */
    protected void logQuery(String queryString, Map<Object, String> parameters) {
        String normalizedQuery = queryString.replace('\n', ' ');
        MDC.put(MDC_QUERY, normalizedQuery);
        MDC.put(MDC_PARAMETERS, String.valueOf(parameters));
        if (logger.isDebugEnabled()) {
            logger.debug(normalizedQuery);
        }
    }

    protected void cleanupMDC() {
        MDC.remove(MDC_QUERY);
        MDC.remove(MDC_PARAMETERS);
    }

    // Resets query metadata and MDC so this instance can be reused.
    protected void reset() {
        queryMixin.getMetadata().reset();
        cleanupMDC();
    }

    @Override
    @SuppressWarnings("unchecked")
    public T fetchOne() {
        Query query = createQuery();
        return (T) uniqueResult(query);
    }

    /**
     * Executes the query expecting at most one row.
     *
     * @return the single result, or null when no row matched
     * @throws NonUniqueResultException (Querydsl's) when more than one row matched
     */
    @Nullable
    private Object uniqueResult(Query query) {
        try {
            return getSingleResult(query);
        } catch (javax.persistence.NoResultException e) {
            // "no result" is a normal outcome here, not an error
            logger.trace(e.getMessage(),e);
            return null;
        } catch (javax.persistence.NonUniqueResultException e) {
            // translate the JPA exception into the Querydsl equivalent
            throw new NonUniqueResultException();
        } finally {
            reset();
        }
    }

    @SuppressWarnings("unchecked")
    public Q setLockMode(LockModeType lockMode) {
        this.lockMode = lockMode;
        return (Q) this;
    }

    @SuppressWarnings("unchecked")
    public Q setFlushMode(FlushModeType flushMode) {
        this.flushMode = flushMode;
        return (Q) this;
    }

    @SuppressWarnings("unchecked")
    public Q setHint(String name, Object value) {
        hints.put(name, value);
        return (Q) this;
    }

    // Copies this query's JPA-specific state into a clone.
    @Override
    protected void clone(Q query) {
        super.clone(query);
        flushMode = query.flushMode;
        hints.putAll(query.hints);
        lockMode = query.lockMode;
        projection = query.projection;
    }

    /**
     * Clone this query bound to another EntityManager.
     */
    public abstract Q clone(EntityManager entityManager);

    @Override
    public Q clone() {
        return this.clone(this.entityManager);
    }
}
package lucene.engine;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.NoSuchFileException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;

import lucene.IActionQueue;
import lucene.action.DeleteAction;
import lucene.action.IndexAction;
import lucene.action.TruncateAction;
import lucene.spec.IndexSpec;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;

import play.Logger;

/**
 * Base class for stand-alone (aka single-server) indices.
 *
 * <p>Mutating actions only increment {@link #uncommitActions}; a background
 * {@link CommitThread} flushes pending changes roughly once per second.</p>
 *
 * @author ThanhNB
 * @since 0.1.0
 */
public class StandaloneIndex extends AbstractIndex {

    /**
     * Factory method: builds and initializes a StandaloneIndex.
     *
     * @return the initialized index, or {@code null} if directory or spec is null
     * @throws IOException on index initialization failure
     */
    public static StandaloneIndex create(Directory directory, IndexSpec spec, IActionQueue actionQueue)
            throws IOException {
        if (directory != null && spec != null) {
            StandaloneIndex index = new StandaloneIndex(directory, spec, actionQueue);
            index.init();
            return index;
        }
        return null;
    }

    public StandaloneIndex(Directory directory, IndexSpec spec, IActionQueue actionQueue) {
        super(directory, spec, actionQueue);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public StandaloneIndex init() throws IOException {
        super.init();
        commitThread = new CommitThread(getName());
        commitThread.start();
        return this;
    }

    @Override
    public void destroy() {
        if (commitThread != null) {
            try {
                commitThread.stopExecution();
            } catch (Exception ignored) {
                // best-effort shutdown: failing to stop the commit thread must
                // not prevent the rest of the teardown
            } finally {
                commitThread = null;
            }
        }
        super.destroy();
    }

    // Number of index-mutating actions since the last commit; written by the
    // action methods, polled by CommitThread.
    private AtomicLong uncommitActions = new AtomicLong(0);

    /**
     * Background daemon that commits pending changes roughly once per second.
     */
    private final class CommitThread extends Thread {
        // FIX: must be volatile — stopExecution() is called from another
        // thread, and without volatile the run() loop might never observe
        // the flag change.
        private volatile boolean running = true;

        public CommitThread(String indexName) {
            super("CommitThread - " + indexName);
            setDaemon(true);
        }

        public void stopExecution() {
            this.running = false;
            // Wake the thread if it is parked in Thread.sleep() so shutdown
            // is prompt instead of waiting out the sleep interval.
            this.interrupt();
        }

        @Override
        public void run() {
            while (running && !isInterrupted()) {
                try {
                    doCommit();
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt flag and let the loop condition
                    // exit; silently swallowing it (as before) defeated the
                    // isInterrupted() check above.
                    Thread.currentThread().interrupt();
                } catch (Exception e) {
                    // Keep the commit loop alive, but don't lose the failure silently.
                    Logger.warn("[" + getName() + "] commit failed: " + e);
                }
            }
        }
    }

    /*----------------------------------------------------------------------*/

    private CommitThread commitThread;

    /**
     * {@inheritDoc}
     */
    @Override
    protected IndexWriter openIndexWriter() throws IOException {
        Directory directory = getDirectory();
        try {
            // Remove a stale write lock left behind by a previously crashed
            // instance, so the writer can be reopened.
            if (directory.fileLength(IndexWriter.WRITE_LOCK_NAME) >= 0) {
                directory.deleteFile(IndexWriter.WRITE_LOCK_NAME);
            }
        } catch (FileNotFoundException | NoSuchFileException ignored) {
            // no lock file present — nothing to clean up
        }
        return super.openIndexWriter();
    }

    // Snapshot of interesting IndexWriter counters for debug logging
    // (extracted: this was duplicated twice inside doCommit).
    private static Map<String, Object> writerStats(IndexWriter iw) {
        Map<String, Object> stats = new HashMap<String, Object>();
        stats.put("docs", iw.numDocs());
        stats.put("del", iw.hasDeletions());
        stats.put("pending", iw.hasPendingMerges());
        stats.put("change", iw.hasUncommittedChanges());
        return stats;
    }

    /**
     * Perform index commit. No-op when nothing changed since the last commit.
     *
     * @throws IOException on commit failure
     */
    protected void doCommit() throws IOException {
        if (uncommitActions.get() > 0) {
            Lock lock = getWriteLock();
            lock.lock();
            try {
                long numDocs = uncommitActions.get();
                IndexWriter iw = getIndexWriter();
                if (Logger.isDebugEnabled()) {
                    Logger.debug("[" + getName() + "] committing " + numDocs
                            + (numDocs > 1 ? " changes: " : " change: ") + writerStats(iw));
                }
                iw.commit();
                if (Logger.isDebugEnabled()) {
                    Logger.debug("\tafter commit: " + writerStats(iw));
                }
                uncommitActions.set(0);
            } finally {
                lock.unlock();
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean performIndexAction(IndexAction action) throws IOException {
        Map<String, Object> docData = action.doc();
        Document doc = buildDocument(docData);
        if (doc != null) {
            Lock lock = getReadLock();
            lock.lock();
            try {
                IndexWriter iw = getIndexWriter();
                // Delete-then-add implements an upsert for the document's key query.
                Query queryForDeletion = buildQueryForDeletion(docData);
                if (queryForDeletion != null) {
                    iw.deleteDocuments(queryForDeletion);
                }
                iw.addDocument(doc);
                uncommitActions.incrementAndGet();
                return true;
            } finally {
                lock.unlock();
            }
        }
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean performDeleteAction(DeleteAction action) throws IOException {
        Lock lock = getReadLock();
        lock.lock();
        try {
            IndexWriter iw = getIndexWriter();
            switch (action.deleteMethod()) {
                case DeleteAction.DELETE_METHOD_TERM: {
                    Query queryForDeletion = buildQueryForDeletion(action.term());
                    if (queryForDeletion != null) {
                        iw.deleteDocuments(queryForDeletion);
                        uncommitActions.incrementAndGet();
                        return true;
                    }
                    // NOTE(review): falls through to the query-based branch when
                    // no term query could be built — confirm this is intended.
                }
                default: {
                    Query queryForDeletion = parseQuery(action.query());
                    if (queryForDeletion != null) {
                        iw.deleteDocuments(queryForDeletion);
                        uncommitActions.incrementAndGet();
                        return true;
                    }
                }
            }
        } finally {
            lock.unlock();
        }
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean performTruncateAction(TruncateAction action) throws IOException {
        Lock lock = getReadLock();
        lock.lock();
        try {
            IndexWriter iw = getIndexWriter();
            iw.deleteAll();
            uncommitActions.incrementAndGet();
            return true;
        } finally {
            lock.unlock();
        }
    }
}
package data.structure;

import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 * The <tt>MinPQ</tt> class represents a priority queue of generic keys.
 * It supports the usual <em>insert</em> and <em>delete-the-minimum</em>
 * operations, along with methods for peeking at the minimum key,
 * testing if the priority queue is empty, and iterating through
 * the keys.
 * <p>
 * This implementation uses a binary heap.
 * The <em>insert</em> and <em>delete-the-minimum</em> operations take
 * logarithmic amortized time.
 * The <em>min</em>, <em>size</em>, and <em>is-empty</em> operations take constant time.
 * Construction takes time proportional to the specified capacity or the number of
 * items used to initialize the data structure.
 * <p>
 * For additional documentation, see <a href="http://algs4.cs.princeton.edu/24pq">Section 2.4</a> of
 * <i>Algorithms, 4th Edition</i> by Robert Sedgewick and Kevin Wayne.
 *
 * @author Robert Sedgewick
 * @author Kevin Wayne
 * @param <Key> the generic type of key on this priority queue
 */
public class MinPQ<Key extends Comparable<Key>> implements IPriorityQueue<Key>, Iterable<Key> {
    private Key[] pq;                    // store items at indices 1 to N
    private int N;                       // number of items on priority queue
    private Comparator<Key> comparator;  // optional comparator

    /**
     * Initializes an empty priority queue with the given initial capacity.
     * @param initCapacity the initial capacity of the priority queue
     */
    @SuppressWarnings("unchecked")
    public MinPQ(int initCapacity) {
        // Generic array creation is impossible in Java; the Object[] cast is
        // safe because elements only enter through the typed API.
        pq = (Key[]) new Object[initCapacity + 1];
        N = 0;
    }

    /**
     * Initializes an empty priority queue.
     */
    public MinPQ() {
        this(1);
    }

    /**
     * Initializes an empty priority queue with the given initial capacity,
     * using the given comparator.
     * @param initCapacity the initial capacity of the priority queue
     * @param comparator the order to use when comparing keys
     */
    @SuppressWarnings("unchecked")
    public MinPQ(int initCapacity, Comparator<Key> comparator) {
        this.comparator = comparator;
        pq = (Key[]) new Object[initCapacity + 1];
        N = 0;
    }

    /**
     * Initializes an empty priority queue using the given comparator.
     * @param comparator the order to use when comparing keys
     */
    public MinPQ(Comparator<Key> comparator) {
        this(1, comparator);
    }

    /**
     * Initializes a priority queue from the array of keys.
     * Takes time proportional to the number of keys, using sink-based heap construction.
     * @param keys the array of keys
     */
    @SuppressWarnings("unchecked")
    public MinPQ(Key[] keys) {
        N = keys.length;
        pq = (Key[]) new Object[keys.length + 1];
        System.arraycopy(keys, 0, pq, 1, N);
        // Bottom-up heapify: sink every internal node, deepest first.
        for (int k = N/2; k >= 1; k--)
            sink(k);
        assert isMinHeap();
    }

    @Override
    public boolean isEmpty() {
        return N == 0;
    }

    @Override
    public int size() {
        return N;
    }

    // helper function to double the size of the heap array
    @SuppressWarnings("unchecked")
    private void resize(int capacity) {
        assert capacity > N;
        Key[] temp = (Key[]) new Object[capacity];
        // bulk copy of the live slots 1..N (slot 0 is unused) instead of a manual loop
        System.arraycopy(pq, 1, temp, 1, N);
        pq = temp;
    }

    /***********************************************************************
     * Helper functions to restore the heap invariant.
     **********************************************************************/

    // Move the key at k up while it is smaller than its parent.
    private void swim(int k) {
        while (k > 1 && greater(k/2, k)) {
            exch(k, k/2);
            k = k/2;
        }
    }

    // Move the key at k down, always swapping with the smaller child.
    private void sink(int k) {
        while (2*k <= N) {
            int j = 2*k;
            if (j < N && greater(j, j+1)) j++;
            if (!greater(k, j)) break;
            exch(k, j);
            k = j;
        }
    }

    /***********************************************************************
     * Helper functions for compares and swaps.
     **********************************************************************/
    private boolean greater(int i, int j) {
        if (comparator == null) {
            // Key extends Comparable<Key>, so no cast is needed here
            // (the original had a redundant (Comparable<Key>) cast).
            return pq[i].compareTo(pq[j]) > 0;
        }
        else {
            return comparator.compare(pq[i], pq[j]) > 0;
        }
    }

    private void exch(int i, int j) {
        Key swap = pq[i];
        pq[i] = pq[j];
        pq[j] = swap;
    }

    // is pq[1..N] a min heap?
    private boolean isMinHeap() {
        return isMinHeap(1);
    }

    // is subtree of pq[1..N] rooted at k a min heap?
    private boolean isMinHeap(int k) {
        if (k > N) return true;
        int left = 2*k, right = 2*k + 1;
        if (left  <= N && greater(k, left))  return false;
        if (right <= N && greater(k, right)) return false;
        return isMinHeap(left) && isMinHeap(right);
    }

    /***********************************************************************
     * Iterators
     **********************************************************************/

    /**
     * Returns an iterator that iterates over the keys on the priority queue
     * in ascending order.
     * The iterator doesn't implement <tt>remove()</tt> since it's optional.
     * @return an iterator that iterates over the keys in ascending order
     */
    @Override
    public Iterator<Key> iterator() {
        return new HeapIterator();
    }

    @Override
    public void enqueue(Key value) {
        // double size of array if necessary
        if (N == pq.length - 1) resize(2 * pq.length);

        // add x, and percolate it up to maintain heap invariant
        pq[++N] = value;
        swim(N);
        assert isMinHeap();
    }

    @Override
    public Key dequeue() throws NoSuchElementException {
        if (isEmpty()) throw new NoSuchElementException("Priority queue underflow");
        exch(1, N);
        Key min = pq[N--];
        sink(1);
        pq[N+1] = null;  // avoid loitering and help with garbage collection
        if ((N > 0) && (N == (pq.length - 1) / 4)) resize(pq.length / 2);
        assert isMinHeap();
        return min;
    }

    @Override
    public Key peek() throws NoSuchElementException {
        if (isEmpty()) throw new NoSuchElementException("Priority queue underflow");
        return pq[1];
    }

    private class HeapIterator implements Iterator<Key> {
        // create a new pq
        private final MinPQ<Key> copy;

        // add all items to copy of heap
        // takes linear time since already in heap order so no keys move
        public HeapIterator() {
            if (comparator == null) copy = new MinPQ<>(size());
            else                    copy = new MinPQ<>(size(), comparator);
            for (int i = 1; i <= N; i++)
                copy.enqueue(pq[i]);
        }

        @Override
        public boolean hasNext() {
            return !copy.isEmpty();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Key next() {
            if (!hasNext()) throw new NoSuchElementException();
            return copy.dequeue();
        }
    }
}
/* * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2004-2007], Hyperic, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. 
*/ package org.hyperic.hq.bizapp.server.session; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.security.auth.login.LoginException; import org.hyperic.hq.appdef.shared.AppdefEntityConstants; import org.hyperic.hq.appdef.shared.AppdefEntityID; import org.hyperic.hq.appdef.shared.AppdefEntityNotFoundException; import org.hyperic.hq.appdef.shared.AppdefEntityTypeID; import org.hyperic.hq.appdef.shared.AppdefGroupNotFoundException; import org.hyperic.hq.appdef.shared.AppdefManager; import org.hyperic.hq.appdef.shared.AppdefResourceValue; import org.hyperic.hq.auth.shared.SessionException; import org.hyperic.hq.auth.shared.SessionManager; import org.hyperic.hq.auth.shared.SessionNotFoundException; import org.hyperic.hq.auth.shared.SessionTimeoutException; import org.hyperic.hq.authz.server.session.AuthzSubject; import org.hyperic.hq.authz.shared.PermissionException; import org.hyperic.hq.bizapp.shared.AppdefBoss; import org.hyperic.hq.bizapp.shared.AuthBoss; import org.hyperic.hq.bizapp.shared.ControlBoss; import org.hyperic.hq.common.ApplicationException; import org.hyperic.hq.control.server.session.ControlHistory; import org.hyperic.hq.control.server.session.ControlSchedule; import org.hyperic.hq.control.shared.ControlFrequencyValue; import org.hyperic.hq.control.shared.ControlManager; import org.hyperic.hq.control.shared.ControlScheduleManager; import org.hyperic.hq.control.shared.ScheduledJobNotFoundException; import org.hyperic.hq.control.shared.ScheduledJobRemoveException; import org.hyperic.hq.grouping.shared.GroupNotCompatibleException; import org.hyperic.hq.product.PluginException; import org.hyperic.hq.product.PluginNotFoundException; import org.hyperic.hq.scheduler.ScheduleValue; import org.hyperic.util.pager.PageControl; import org.hyperic.util.pager.PageList; import org.quartz.SchedulerException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import 
org.springframework.transaction.annotation.Transactional; @Service @Transactional public class ControlBossImpl implements ControlBoss { private AppdefBoss appdefBoss; private SessionManager sessionManager; private AppdefManager appdefManager; private AuthBoss authBoss; private ControlManager controlManager; private ControlScheduleManager controlScheduleManager; @Autowired public ControlBossImpl(SessionManager sessionManager, AppdefManager appdefManager, AuthBoss authBoss, ControlManager controlManager, ControlScheduleManager controlScheduleManager, AppdefBoss appdefBoss) { this.appdefManager = appdefManager; this.authBoss = authBoss; this.controlManager = controlManager; this.controlScheduleManager = controlScheduleManager; this.sessionManager = sessionManager; this.appdefBoss = appdefBoss; } /** * Execute a control action immediately on an appdef entity. * * @param action The action to perform */ public void doAction(int sessionId, AppdefEntityID id, String action, String args) throws PluginException, GroupNotCompatibleException, SessionNotFoundException, SessionTimeoutException, PermissionException, AppdefEntityNotFoundException { if (id.isGroup()) { doGroupAction(sessionId, id, action, args, null); } else { AuthzSubject subject = sessionManager.getSubject(sessionId); controlManager.doAction(subject, id, action, args); } } /** * Execute a control action * * This is used for doing scheduled control actions. 
* * @param controlJob The control job action name * @param schedule The control job schedule */ public void doAction(int sessionId, AppdefEntityID id, String action, ScheduleValue schedule) throws PluginException, SchedulerException, SessionNotFoundException, SessionTimeoutException, PermissionException, AppdefEntityNotFoundException, GroupNotCompatibleException, ApplicationException { if (id.isGroup()) { doGroupAction(sessionId, id, action, null, schedule); } else { AuthzSubject subject = sessionManager.getSubject(sessionId); controlManager.scheduleAction(subject, id, action, schedule); } } /** * Schedule a control action on a group entity. * * @param action The action to perform */ public void doGroupAction(int sessionId, AppdefEntityID groupEnt, String action, int[] orderSpec, ScheduleValue schedule) throws PluginException, SchedulerException, SessionNotFoundException, SessionTimeoutException, PermissionException, AppdefEntityNotFoundException, GroupNotCompatibleException, ApplicationException { AuthzSubject subject = sessionManager.getSubject(sessionId); controlManager.scheduleGroupAction(subject, groupEnt, action, orderSpec, schedule); } /** * Execute a control action immediately on a group entity. 
* * @param action The action to perform */ public void doGroupAction(int sessionId, AppdefEntityID groupEnt, String action, String args, int[] orderSpec) throws PluginException, GroupNotCompatibleException, SessionNotFoundException, SessionTimeoutException, PermissionException, AppdefEntityNotFoundException { AuthzSubject subject = sessionManager.getSubject(sessionId); controlManager.doGroupAction(subject, groupEnt, action, args, orderSpec); } /** * Get the actions supported for an appdef entity */ @Transactional(readOnly=true) public List<String> getActions(int sessionId, AppdefEntityID id) throws PluginNotFoundException, AppdefEntityNotFoundException, SessionNotFoundException, SessionTimeoutException, PermissionException, GroupNotCompatibleException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.getActions(subject, id); } /** * Get the actions supported for an appdef entity type */ @Transactional(readOnly=true) public List<String> getActions(int sessionId, AppdefEntityTypeID aetid) throws PluginNotFoundException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.getActions(subject, aetid); } /** * Check if a group has been enabled for control */ @Transactional(readOnly=true) public boolean isGroupControlEnabled(int sessionId, AppdefEntityID id) throws AppdefEntityNotFoundException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.isGroupControlEnabled(subject, id); } /** * Check if the entity's resource supports control */ @Transactional(readOnly=true) public boolean isControlSupported(int sessionId, AppdefResourceValue res) throws SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager .isControlSupported(subject, res.getEntityId(), 
res.getAppdefResourceTypeValue().getName()); } /** * Check if the entity's resource supports control */ @Transactional(readOnly=true) public boolean isControlSupported(int sessionId, AppdefEntityTypeID tid) throws SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.isControlSupported(subject, tid.getAppdefResourceType().getName()); } /** * Check if anything has been enabled for control * */ @Transactional(readOnly=true) public boolean isControlEnabled(int sessionId) throws SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionId); Map<String, AppdefEntityID> platTypes = appdefManager.getControllablePlatformTypes(subject); if (platTypes.size() > 0) { return true; } Map<String, AppdefEntityTypeID> svrTypes = appdefManager.getControllableServerTypes(subject); if (svrTypes.size() > 0) { return true; } Map<String, AppdefEntityTypeID> svcTypes = appdefManager.getControllableServiceTypes(subject); return (svcTypes.size() > 0); } /** * Check if an entity has been enabled for control */ @Transactional(readOnly=true) public boolean isControlEnabled(int sessionId, AppdefEntityID id) throws AppdefEntityNotFoundException, SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.isControlEnabled(subject, id); } /** * Finder for all of the scheduled jobs for an appdef entity. 
* * @return List of scheduled actions */ @Transactional(readOnly=true) public PageList<ControlSchedule> findScheduledJobs(int sessionId, AppdefEntityID id, PageControl pc) throws PluginException, ScheduledJobNotFoundException, SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.findScheduledJobs(subject, id, pc); } /** * Remove all of the scheduled jobs for an appdef entity. */ public void removeScheduledJobs(int sessionId, AppdefEntityID id) throws SessionNotFoundException, SessionTimeoutException, ScheduledJobRemoveException { AuthzSubject subject = sessionManager.getSubject(sessionId); controlScheduleManager.removeScheduledJobs(subject, id); } /** * Get a job history based on appdef id * * @TODO Implement page controls, Authz integration */ @Transactional(readOnly=true) public PageList<ControlHistory> findJobHistory(int sessionId, AppdefEntityID id, PageControl pc) throws PluginException, ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.findJobHistory(subject, id, pc); } /** * Group job history detail on group appdef id */ @Transactional(readOnly=true) public PageList<ControlHistory> findGroupJobHistory(int sessionId, AppdefEntityID id, int batchJobId, PageControl pc) throws PluginException, ApplicationException, SessionNotFoundException, SessionTimeoutException, PermissionException, AppdefGroupNotFoundException { if (!id.isGroup()) { throw new IllegalArgumentException("Invalid group entity specified"); } AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.findGroupJobHistory(subject, batchJobId, id, pc); } /** * Remove an entry from the control history * * @TODO Authz integration */ public void deleteJobHistory(int sessionId, Integer[] ids) throws ApplicationException, 
PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); controlScheduleManager.deleteJobHistory(subject, ids); } /** * Obtain the current action that is being executed. If there is no current * running action, null is returned. * * @return currently running ControlJob. */ @Transactional(readOnly=true) public ControlHistory getCurrentJob(int sessionId, AppdefEntityID id) throws ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getCurrentJob(subject, id); } /** * Obtain a control action based on job id * * @return last ControlJob that ran */ @Transactional(readOnly=true) public ControlHistory getJobByJobId(int sessionId, Integer id) throws ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getJobByJobId(subject, id); } /** * Obtain the last control action that fired * * @return last ControlJob that ran */ @Transactional(readOnly=true) public ControlHistory getLastJob(int sessionId, AppdefEntityID id) throws ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getLastJob(subject, id); } /** * Obtain a ControlJob based on an id * * @param triggerName The control trigger name * * @return The control job that was requested */ @Transactional(readOnly=true) public ControlSchedule getControlJob(int sessionId, Integer id) throws PluginException, ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getControlJob(subject, id); } /** * Delete a ControlJob based on an id * * 
@param ids Array of job ids to be deleted */ public void deleteControlJob(int sessionId, Integer[] ids) throws PluginException, ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); controlScheduleManager.deleteControlJob(subject, ids); } // Dashboard routines /** * Get a list of recent control actions in decending order */ @Transactional(readOnly=true) public PageList<ControlHistory> getRecentControlActions(int sessionId, int rows, long window) throws ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getRecentControlActions(subject, rows, window); } /** * Get a list of recent control actions in decending order. Called by RSS * feed so it does not require valid session ID. * * @throws ApplicationException if user is not found * @throws LoginException if user account has been disabled */ @Transactional(readOnly=true) public PageList<ControlHistory> getRecentControlActions(String user, int rows, long window) throws LoginException, ApplicationException { int sessionId = authBoss.getUnauthSessionId(user); return getRecentControlActions(sessionId, rows, window); } /** * Get a list of pending control actions in decending order */ @Transactional(readOnly=true) public PageList<ControlSchedule> getPendingControlActions(int sessionId, int rows) throws ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getPendingControlActions(subject, rows); } /** * Get a list of most active control operations */ @Transactional(readOnly=true) public PageList<ControlFrequencyValue> getOnDemandControlFrequency(int sessionId, int numToReturn) throws ApplicationException, PermissionException, ApplicationException, 
SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getOnDemandControlFrequency(subject, numToReturn); } /** * Accept an array of appdef entity Ids and verify control permission on * each entity for specified subject. Return an array containing the set or * subset of entities where subject has control authorization. * * @return List of entities that are control authorized. */ public List<AppdefEntityID> batchCheckControlPermissions(int sessionId, AppdefEntityID[] entities) throws AppdefEntityNotFoundException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlManager.batchCheckControlPermissions(subject, entities); } /** * Find types of all controllable platforms defined in the system. * * @return A map of PlatformType names and AppdefEntityTypeIDs. * @throws PermissionException */ @Transactional(readOnly=true) public Map<String, AppdefEntityID> findControllablePlatformTypes(int sessionID) throws SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionID); return appdefManager.getControllablePlatformTypes(subject); } /** * Find types of all controllable servers defined in the system. * * @return A map of ServerType names and AppdefEntityTypeIDs. * @throws PermissionException */ @Transactional(readOnly=true) public Map<String, AppdefEntityTypeID> findControllableServerTypes(int sessionID) throws SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionID); return appdefManager.getControllableServerTypes(subject); } /** * Find types of all controllable services defined in the system. * * @return A map of ServiceType names and AppdefEntityTypeIDs. 
* @throws PermissionException */ @Transactional(readOnly=true) public Map<String, AppdefEntityTypeID> findControllableServiceTypes(int sessionID) throws SessionNotFoundException, SessionTimeoutException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionID); return appdefManager.getControllableServiceTypes(subject); } /** * Find names of all controllable resources of a given type. * * @return A map of Service names and AppdefEntityIDs. * @throws PermissionException */ @Transactional(readOnly=true) public Map<String, AppdefEntityID> findControllableResourceNames(int sessionID, AppdefEntityTypeID aetid) throws SessionNotFoundException, SessionException, PermissionException { AuthzSubject subject = sessionManager.getSubject(sessionID); Map<String, AppdefEntityID> result; int groupType; // Return based on type switch (aetid.getType()) { case AppdefEntityConstants.APPDEF_TYPE_PLATFORM: result = appdefManager.getControllablePlatformNames(subject, aetid.getID()); groupType = AppdefEntityConstants.APPDEF_TYPE_GROUP_COMPAT_PS; break; case AppdefEntityConstants.APPDEF_TYPE_SERVER: result = appdefManager.getControllableServerNames(subject, aetid.getID()); groupType = AppdefEntityConstants.APPDEF_TYPE_GROUP_COMPAT_PS; break; case AppdefEntityConstants.APPDEF_TYPE_SERVICE: result = appdefManager.getControllableServiceNames(subject, aetid.getID()); groupType = AppdefEntityConstants.APPDEF_TYPE_GROUP_COMPAT_SVC; break; default: throw new IllegalArgumentException("Unsupported appdef type " + aetid.getType()); } try { // Get the controllable groups, too List<AppdefResourceValue> groups = appdefBoss.findCompatInventory(sessionID, groupType, AppdefEntityConstants.APPDEF_TYPE_GROUP, aetid.getType(), aetid.getID(), null, null, PageControl.PAGE_ALL); for (Iterator<AppdefResourceValue> i = groups.iterator(); i.hasNext();) { AppdefResourceValue group = i.next(); if (isControlSupported(sessionID, group)) { result.put(group.getName(), group.getEntityId()); } } } 
catch (AppdefEntityNotFoundException e) { // Nothing to worry about } catch (PermissionException e) { // Nothing to worry about } return result; } }
/** * Licensed to DigitalPebble Ltd under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * DigitalPebble licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpebble.storm.crawler.bolt; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.text.SimpleDateFormat; import java.util.Locale; import java.util.Map; import backtype.storm.metric.api.MeanReducer; import backtype.storm.metric.api.MultiReducedMetric; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import backtype.storm.Config; import backtype.storm.Constants; import backtype.storm.metric.api.MultiCountMetric; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.topology.OutputFieldsDeclarer; import backtype.storm.topology.base.BaseRichBolt; import backtype.storm.tuple.Fields; import backtype.storm.tuple.Tuple; import backtype.storm.tuple.Values; import backtype.storm.utils.Utils; import com.digitalpebble.storm.crawler.Metadata; import com.digitalpebble.storm.crawler.filtering.URLFilters; import com.digitalpebble.storm.crawler.persistence.Status; import com.digitalpebble.storm.crawler.protocol.HttpHeaders; import com.digitalpebble.storm.crawler.protocol.Protocol; import com.digitalpebble.storm.crawler.protocol.ProtocolFactory; import 
com.digitalpebble.storm.crawler.protocol.ProtocolResponse;
import com.digitalpebble.storm.crawler.util.ConfUtils;
import com.digitalpebble.storm.crawler.util.MetadataTransfer;
import com.digitalpebble.storm.crawler.util.URLUtil;

import crawlercommons.robots.BaseRobotRules;

/**
 * A single-threaded fetcher with no internal queue. Use of this fetcher
 * requires that the user implement an external queue that enforces crawl-delay
 * politeness constraints.
 */
public class SimpleFetcherBolt extends BaseRichBolt {

    private static final Logger LOG = LoggerFactory
            .getLogger(SimpleFetcherBolt.class);

    private Config conf;

    private OutputCollector _collector;

    // Storm metrics: raw event counts plus mean-reduced timings and sizes.
    private MultiCountMetric eventCounter;
    private MultiReducedMetric averagedMetrics;

    private ProtocolFactory protocolFactory;
    private URLFilters urlFilters;
    private MetadataTransfer metadataTransfer;

    private int taskIndex = -1;

    // whether redirect targets should be emitted as DISCOVERED on the
    // status stream
    private boolean allowRedirs;

    /**
     * Ensures that 'http.agent.name' is configured; robots.txt handling
     * requires a non-empty agent name to advertise.
     */
    private void checkConfiguration() {
        // ensure that a value has been set for the agent name and that that
        // agent name is the first value in the agents we advertise for robot
        // rules parsing
        String agentName = (String) getConf().get("http.agent.name");
        if (agentName == null || agentName.trim().length() == 0) {
            String message = "Fetcher: No agents listed in 'http.agent.name'"
                    + " property.";
            LOG.error(message);
            throw new IllegalArgumentException(message);
        }
    }

    private Config getConf() {
        return this.conf;
    }

    @Override
    public void prepare(Map stormConf, TopologyContext context,
            OutputCollector collector) {
        _collector = collector;
        this.conf = new Config();
        this.conf.putAll(stormConf);
        checkConfiguration();

        // FIX: resolve the task index before it is used in log messages;
        // previously the startup line below always logged "#-1" because the
        // assignment happened after the log call.
        this.taskIndex = context.getThisTaskIndex();

        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss",
                Locale.ENGLISH);
        long start = System.currentTimeMillis();
        LOG.info("[Fetcher #{}] : starting at {}", taskIndex, sdf.format(start));

        // Register a "MultiCountMetric" to count different events in this bolt
        // Storm will emit the counts every n seconds to a special bolt via a
        // system stream
        // The data can be accessed by registering a "MetricConsumer" in the
        // topology
        this.eventCounter = context.registerMetric("fetcher_counter",
                new MultiCountMetric(), 10);

        this.averagedMetrics = context.registerMetric("fetcher_average",
                new MultiReducedMetric(new MeanReducer()), 10);

        protocolFactory = new ProtocolFactory(conf);

        String urlconfigfile = ConfUtils.getString(conf,
                "urlfilters.config.file", "urlfilters.json");

        if (urlconfigfile != null)
            try {
                urlFilters = new URLFilters(conf, urlconfigfile);
            } catch (IOException e) {
                LOG.error("Exception caught while loading the URLFilters");
                throw new RuntimeException(
                        "Exception caught while loading the URLFilters", e);
            }

        metadataTransfer = MetadataTransfer.getInstance(stormConf);

        allowRedirs = ConfUtils.getBoolean(stormConf,
                com.digitalpebble.storm.crawler.Constants.AllowRedirParamName,
                true);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // default stream carries fetched content; everything else goes on the
        // status stream
        declarer.declare(new Fields("url", "content", "metadata"));
        declarer.declareStream(
                com.digitalpebble.storm.crawler.Constants.StatusStreamName,
                new Fields("url", "metadata", "status"));
    }

    @Override
    public void execute(Tuple input) {
        if (!input.contains("url")) {
            LOG.info("[Fetcher #{}] Missing field url in tuple {}", taskIndex,
                    input);
            // ignore silently
            _collector.ack(input);
            return;
        }

        String urlString = input.getStringByField("url");
        // has a url field but is the value usable?
        if (StringUtils.isBlank(urlString)) {
            LOG.info("[Fetcher #{}] Missing value for field url in tuple {}",
                    taskIndex, input);
            // ignore silently
            _collector.ack(input);
            return;
        }

        Metadata metadata = null;

        if (input.contains("metadata"))
            metadata = (Metadata) input.getValueByField("metadata");
        if (metadata == null)
            metadata = Metadata.empty;

        URL url;

        try {
            url = new URL(urlString);
        } catch (MalformedURLException e) {
            LOG.error("{} is a malformed URL", urlString);
            // ignore silently
            _collector.ack(input);
            return;
        }

        try {
            Protocol protocol = protocolFactory.getProtocol(url);

            BaseRobotRules rules = protocol.getRobotRules(urlString);
            if (!rules.isAllowed(urlString)) {
                LOG.info("Denied by robots.txt: {}", urlString);

                // Report to status stream and ack
                _collector
                        .emit(com.digitalpebble.storm.crawler.Constants.StatusStreamName,
                                input, new Values(urlString, metadata,
                                        Status.ERROR));
                _collector.ack(input);
                return;
            }

            long start = System.currentTimeMillis();
            ProtocolResponse response = protocol.getProtocolOutput(urlString,
                    metadata);

            averagedMetrics.scope("fetch_time").update(
                    System.currentTimeMillis() - start);
            averagedMetrics.scope("bytes_fetched").update(
                    response.getContent().length);

            LOG.info("[Fetcher #{}] Fetched {} with status {}", taskIndex,
                    urlString, response.getStatusCode());

            eventCounter.scope("fetched").incrBy(1);

            response.getMetadata().setValue("fetch.statusCode",
                    Integer.toString(response.getStatusCode()));

            // pass the original tuple metadata through to downstream bolts
            response.getMetadata().putAll(metadata);

            // determine the status based on the status code
            Status status = Status.fromHTTPCode(response.getStatusCode());

            // if the status is OK emit on default stream
            if (status.equals(Status.FETCHED)) {
                _collector.emit(
                        Utils.DEFAULT_STREAM_ID,
                        input,
                        new Values(urlString, response.getContent(), response
                                .getMetadata()));
            } else if (status.equals(Status.REDIRECTION)) {
                // Mark URL as redirected
                _collector
                        .emit(com.digitalpebble.storm.crawler.Constants.StatusStreamName,
                                input, new Values(urlString, response
                                        .getMetadata(), status));

                // find the URL it redirects to
                String redirection = response.getMetadata().getFirstValue(
                        HttpHeaders.LOCATION);

                if (allowRedirs && redirection != null
                        && StringUtils.isNotBlank(redirection)) {
                    handleRedirect(input, urlString, redirection,
                            response.getMetadata());
                }
            } else {
                // Error
                _collector
                        .emit(com.digitalpebble.storm.crawler.Constants.StatusStreamName,
                                input, new Values(urlString, response
                                        .getMetadata(), status));
            }

        } catch (Exception exece) {

            String message = exece.getMessage();
            if (message == null)
                message = "";

            if (exece.getCause() instanceof java.util.concurrent.TimeoutException)
                LOG.error("Socket timeout fetching {}", urlString);
            // FIX: use the null-safe 'message' local; the original called
            // exece.getMessage().contains(...) which threw an NPE whenever
            // the exception carried no message.
            else if (message.contains("connection timed out"))
                LOG.error("Socket timeout fetching {}", urlString);
            else
                LOG.error("Exception while fetching {}", urlString, exece);

            eventCounter.scope("failed").incrBy(1);

            // could be an empty, immutable Metadata
            if (metadata.size() == 0) {
                metadata = new Metadata();
            }

            // add the reason of the failure in the metadata
            metadata.setValue("fetch.exception", message);

            _collector.emit(
                    com.digitalpebble.storm.crawler.Constants.StatusStreamName,
                    input, new Values(urlString, metadata, Status.FETCH_ERROR));
        }

        _collector.ack(input);
    }

    /**
     * Resolves {@code newUrl} against {@code sourceUrl}, applies the URL
     * filters, and emits any surviving target on the status stream as
     * DISCOVERED.
     */
    private void handleRedirect(Tuple t, String sourceUrl, String newUrl,
            Metadata sourceMetadata) {

        // build an absolute URL
        URL sURL;
        try {
            sURL = new URL(sourceUrl);
            URL tmpURL = URLUtil.resolveURL(sURL, newUrl);
            newUrl = tmpURL.toExternalForm();
        } catch (MalformedURLException e) {
            LOG.debug("MalformedURLException on {} or {}: {}", sourceUrl,
                    newUrl, e);
            return;
        }

        // apply URL filters
        if (this.urlFilters != null) {
            newUrl = this.urlFilters.filter(sURL, sourceMetadata, newUrl);
        }

        // filtered
        if (newUrl == null) {
            return;
        }

        Metadata metadata = metadataTransfer.getMetaForOutlink(newUrl,
                sourceUrl, sourceMetadata);

        // TODO check that hasn't exceeded max number of redirections
        _collector.emit(
                com.digitalpebble.storm.crawler.Constants.StatusStreamName, t,
                new Values(newUrl, metadata, Status.DISCOVERED));
    }
}
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.api; import java.util.Collection; import java.util.Map; import java.util.Set; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import com.google.common.collect.Multimap; import com.palantir.common.annotation.Idempotent; import com.palantir.common.annotation.NonIdempotent; import com.palantir.common.base.ClosableIterator; import com.palantir.util.paging.BasicResultsPage; import com.palantir.util.paging.TokenBackedBasicResultsPage; /** * A service which stores key-value pairs. */ @Path("/keyvalue") public interface KeyValueService extends AutoCloseable { /** * Performs any initialization that must be done on a fresh instance of the key-value store, * such as creating the metadata table. * * This method should be called when the key-value store is first created. Further calls in the * lifetime of the key-value store should be silently ignored. */ @POST @Path("initialize") void initializeFromFreshInstance(); /** * Performs non-destructive cleanup when the KVS is no longer needed. */ @POST @Path("close") @Override void close(); /** * Performs any cleanup when clearing the database. This method may delete data irrecoverably. 
*/ @POST @Path("teardown") void teardown(); /** * Gets all key value services this key value service delegates to directly. * <p> * This can be used to decompose a complex key value service using table splits, tiers, * or other delegating operations into its subcomponents. */ @POST @Path("get-delegates") @Produces(MediaType.APPLICATION_JSON) Collection<? extends KeyValueService> getDelegates(); /** * Gets values from the key-value store. * * @param tableName the name of the table to retrieve values from. * @param rows set containing the rows to retrieve values for. * @param columnSelection specifies the set of columns to fetch. * @param timestamp specifies the maximum timestamp (exclusive) at which to * retrieve each rows's value. * @return map of retrieved values. Values which do not exist (either * because they were deleted or never created in the first place) * are simply not returned. * @throws IllegalArgumentException if any of the requests were invalid * (e.g., attempting to retrieve values from a non-existent table). */ @POST @Path("get-rows") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent Map<Cell, Value> getRows(@QueryParam("tableName") String tableName, Iterable<byte[]> rows, @QueryParam("columnSelection") ColumnSelection columnSelection, @QueryParam("timestamp") long timestamp); /** * Gets values from the key-value store. * * @param tableName the name of the table to retrieve values from. * @param timestampByCell specifies, for each row, the maximum timestamp (exclusive) at which to * retrieve that rows's value. * @return map of retrieved values. Values which do not exist (either * because they were deleted or never created in the first place) * are simply not returned. * @throws IllegalArgumentException if any of the requests were invalid * (e.g., attempting to retrieve values from a non-existent table). 
*/ @POST @Path("get") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent Map<Cell, Value> get(@QueryParam("tableName") String tableName, Map<Cell, Long> timestampByCell); /** * Gets timestamp values from the key-value store. * * @param tableName the name of the table to retrieve values from. * @param timestampByCell map containing the cells to retrieve timestamps for. The map * specifies, for each key, the maximum timestamp (exclusive) at which to * retrieve that key's value. * @return map of retrieved values. cells which do not exist (either * because they were deleted or never created in the first place) * are simply not returned. * @throws IllegalArgumentException if any of the requests were invalid * (e.g., attempting to retrieve values from a non-existent table). */ @POST @Path("get-latest-timestamps") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent Map<Cell, Long> getLatestTimestamps(@QueryParam("tableName") String tableName, Map<Cell, Long> timestampByCell); /** * Puts values into the key-value store. This call <i>does not</i> guarantee * atomicity across cells. On failure, it is possible * that some of the requests will have succeeded (without having been rolled * back). Similarly, concurrent batched requests may interleave. * <p> * If the key-value store supports durability, this call guarantees that the * requests have successfully been written to disk before returning. * <p> * Putting a null value is the same as putting the empty byte[]. If you want to delete a value * try {@link #delete(String, Multimap)}. * <p> * May throw KeyAlreadyExistsException, if storing a different value to existing key, * but this is not guaranteed even if the key exists - see {@link putUnlessExists}. * <p> * Must not throw KeyAlreadyExistsException when overwriting a cell with the original value (idempotent). * * @param tableName the name of the table to put values into. 
* @param values map containing the key-value entries to put. * @param timestamp must be non-negative and not equal to {@link Long#MAX_VALUE} */ @POST @Path("put") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void put(@QueryParam("tableName") String tableName, Map<Cell, byte[]> values, @QueryParam("timestamp") long timestamp) throws KeyAlreadyExistsException; /** * Puts values into the key-value store. This call <i>does not</i> guarantee * atomicity across cells. On failure, it is possible * that some of the requests will have succeeded (without having been rolled * back). Similarly, concurrent batched requests may interleave. * <p> * If the key-value store supports durability, this call guarantees that the * requests have successfully been written to disk before returning. * <p> * Putting a null value is the same as putting the empty byte[]. If you want to delete a value * try {@link #delete(String, Multimap)}. * <p> * May throw KeyAlreadyExistsException, if storing a different value to existing key, * but this is not guaranteed even if the key exists - see {@link putUnlessExists}. * <p> * Must not throw KeyAlreadyExistsException when overwriting a cell with the original value (idempotent). * * @param valuesByTable map containing the key-value entries to put by table. * @param timestamp must be non-negative and not equal to {@link Long#MAX_VALUE} */ @POST @Path("multi-put") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void multiPut(Map<String, ? extends Map<Cell, byte[]>> valuesByTable, @QueryParam("timestamp") long timestamp) throws KeyAlreadyExistsException; /** * Puts values into the key-value store with individually specified timestamps. * This call <i>does not</i> guarantee atomicity across cells. On failure, it is possible * that some of the requests will have succeeded (without having been rolled * back). Similarly, concurrent batched requests may interleave. 
* <p> * If the key-value store supports durability, this call guarantees that the * requests have successfully been written to disk before returning. * <p> * This method may be non-idempotent. On some write-once implementations retrying this * call may result in failure. The way around this is to delete and retry. * <p> * Putting a null value is the same as putting the empty byte[]. If you want to delete a value * try {@link #delete(String, Multimap)}. * <p> * May throw KeyAlreadyExistsException, if storing a different value to existing key, * but this is not guaranteed even if the key exists - see {@link putUnlessExists}. * <p> * Must not throw KeyAlreadyExistsException when overwriting a cell with the original value (idempotent). * * @param tableName the name of the table to put values into. * @param cellValues map containing the key-value entries to put with * non-negative timestamps less than {@link Long#MAX_VALUE}. */ @POST @Path("put-with-timestamps") @Consumes(MediaType.APPLICATION_JSON) @NonIdempotent @Idempotent void putWithTimestamps(@QueryParam("tableName") String tableName, Multimap<Cell, Value> cellValues) throws KeyAlreadyExistsException; /** * Puts values into the key-value store. This call <i>does not</i> guarantee * atomicity across cells. On failure, it is possible * that some of the requests will have succeeded (without having been rolled * back). Similarly, concurrent batched requests may interleave. However, concurrent writes to the same * Cell will not both report success. One of them will throw {@link KeyAlreadyExistsException}. * <p> * A single Cell will only ever take on one value. * <p> * If the call completes successfully then you know that your value was written and no other value was written * first. If a {@link KeyAlreadyExistsException} is thrown it may be because the underlying call did a retry and * your value was actually put successfully. It is recommended that you check the stored value to account for this case. 
* <p> * Retry should be done by the underlying implementation to ensure that other exceptions besides * {@link KeyAlreadyExistsException} are not thrown spuriously. * * @param tableName the name of the table to put values into. * @param values map containing the key-value entries to put. * @throws KeyAlreadyExistsException If you are putting a Cell with the same timestamp as * one that already exists. */ @POST @Path("put-unless-exists") @Consumes(MediaType.APPLICATION_JSON) void putUnlessExists(@QueryParam("tableName") String tableName, Map<Cell, byte[]> values) throws KeyAlreadyExistsException; /** * Deletes values from the key-value store. * <p> * This call <i>does not</i> guarantee atomicity for deletes across (Cell, ts) pairs. However it * MUST be implemented where timestamps are deleted in increasing order for each Cell. This * means that if there is a request to delete (c, 1) and (c, 2) then the system will never be in * a state where (c, 2) was successfully deleted but (c, 1) still remains. It is possible that * if there is a failure, then some of the cells may have succeeded. Similarly, concurrent * batched requests may interleave. * <p> * If the key-value store supports durability, this call guarantees that the requests have * successfully been written to disk before returning. * <p> * If a key value store supports garbage collection, then a call to delete should mean the value * will not be read in the future. If GC isn't supported, then delete can be written to have a * best effort attempt to delete the values. * <p> * Some systems may require more nodes to be up to ensure that a delete is successful. If this * is the case then this method may throw if the delete can't be completed on all nodes. * * @param tableName the name of the table to delete values from. * @param keys map containing the keys to delete values for; the map should specify, for each * key, the timestamp of the value to delete. 
*/ @POST @Path("delete") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void delete(@QueryParam("tableName") String tableName, Multimap<Cell, Long> keys); /** * Truncate a table in the key-value store. * <p> * This is preferred to dropping and re-adding a table, as live schema changes can * be a complicated topic for distributed databases. * * @param tableName the name of the table to truncate. * * @throws InsufficientConsistencyException if not all hosts respond successfully */ @POST @Path("truncate-table") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void truncateTable(@QueryParam("tableName") String tableName) throws InsufficientConsistencyException; /** * Truncate tables in the key-value store. * <p> * This can be slightly faster than truncating a single table. * * @param tableNames the name of the tables to truncate. * * @throws InsufficientConsistencyException if not all hosts respond successfully */ @POST @Path("truncate-tables") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void truncateTables(Set<String> tableNames) throws InsufficientConsistencyException; /** * For each row in the specified range, returns the most recent version strictly before * timestamp. * * Remember to close any {@link ClosableIterator}s you get in a finally block. * * @param tableName * @param rangeRequest the range to load. * @param timestamp specifies the maximum timestamp (exclusive) at which to retrieve each rows's * value. */ @POST @Path("get-range") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent ClosableIterator<RowResult<Value>> getRange(@QueryParam("tableName") String tableName, RangeRequest rangeRequest, @QueryParam("timestamp") long timestamp); /** * For each row in the specified range, returns all versions strictly before * timestamp. * <p> * This has the same consistency guarantees that {@link #getRangeOfTimestamps(String, RangeRequest, long)}. 
* <p> * Remember to close any {@link ClosableIterator}s you get in a finally block. * * @param tableName * @param rangeRequest the range to load. * @param timestamp specifies the maximum timestamp (exclusive) at which to * retrieve each rows's values. */ @POST @Path("get-range-with-history") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent ClosableIterator<RowResult<Set<Value>>> getRangeWithHistory(@QueryParam("tableName") String tableName, RangeRequest rangeRequest, @QueryParam("timestamp") long timestamp); /** * Gets timestamp values from the key-value store. For each row, this returns all associated * timestamps &lt; given_ts. * <p> * This method has stronger consistency guarantees than regular read requests. This must return * all timestamps stored anywhere in the system. An example of where this could happen is if we * use a system with QUORUM reads and writes. Under normal operations reads only need to talk to * a Quorum of hosts. However this call MUST be implemented by talking to ALL the nodes where a * value could be stored. * * @param tableName the name of the table to read from. * @param rangeRequest the range to load. * @param timestamp the maximum timestamp to load. * * @throws InsufficientConsistencyException if not all hosts respond successfully */ @POST @Path("get-range-of-timestamps") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent ClosableIterator<RowResult<Set<Long>>> getRangeOfTimestamps(@QueryParam("tableName") String tableName, RangeRequest rangeRequest, @QueryParam("timestamp") long timestamp) throws InsufficientConsistencyException; /** * For each range passed in the result will have the first page of results for that range. * <p> * The page size for each range is dictated by the parameter {@link RangeRequest#getBatchHint()}. * If no batch size hint is specified for a range, then it will just get the first row in * that range. 
* <p> * It is possible that the results may be empty if the first cells after the start of the range * all have timestamps greater than the requested timestamp. In this case * {@link TokenBackedBasicResultsPage#moreResultsAvailable()} will return true and the token * for the next page will be set. * <p> * It may be possible to get back a result with {@link BasicResultsPage#moreResultsAvailable()} * set to true when there aren't more left. The next call will return zero results and have * moreResultsAvailable set to false. */ @POST @Path("get-first-batch-for-ranges") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent Map<RangeRequest, TokenBackedBasicResultsPage<RowResult<Value>, byte[]>> getFirstBatchForRanges(@QueryParam("tableName") String tableName, Iterable<RangeRequest> rangeRequests, @QueryParam("timestamp") long timestamp); //////////////////////////////////////////////////////////// // TABLE CREATION AND METADATA //////////////////////////////////////////////////////////// @DELETE @Path("drop-table") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent void dropTable(@QueryParam("tableName") String tableName) throws InsufficientConsistencyException; /** * Drops many tables in idempotent fashion. If you are dropping many tables at once, * use this call as the implementation can be much faster/less error-prone on some KVSs. * * @param tableNames */ @DELETE @Path("drop-tables") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent void dropTables(Set<String> tableNames) throws InsufficientConsistencyException; /** * Creates a table with the specified name. If the table already exists, no action is performed * (the table is left in its current state). * * @param tableName * @param maxValueSizeInBytes This may be used by the key value store to * throw if a value is too big. 
It may also be used by the store as a * hint for small values so we can cache them more effectively in memory. */ @POST @Path("create-table") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent void createTable(@QueryParam("tableName") String tableName, @QueryParam("maxValueSizeInBytes") int maxValueSizeInBytes) throws InsufficientConsistencyException; /** * Creates many tables in idempotent fashion. If you are making many tables at once, * use this call as the implementation can be much faster/less error-prone on some KVSs. * * @param tableNamesToMaxValueSizeInBytes This may be used by the key value store to * throw if a value is too big. It may also be used by the store as a * hint for small values so we can cache them more effectively in memory. */ @POST @Path("create-tables") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent void createTables(Map<String, Integer> tableNamesToMaxValueSizeInBytes) throws InsufficientConsistencyException; /** * Return the list of tables stored in this key value service. * * This will contain system tables (such as the _transaction table), but will not contain * the names of any tables used internally by the key value service (a common example is * a _metadata table for storing table metadata). */ @POST @Path("get-all-table-names") @Produces(MediaType.APPLICATION_JSON) @Idempotent Set<String> getAllTableNames(); /** * Gets the metadata for a given table. Also useful for checking to see if a table exists. * * @return a byte array representing the metadata for the table. Array is empty if no table * with the given name exists. Consider {@link TableMetadata#BYTES_HYDRATOR} for hydrating. 
*/ @Idempotent @POST @Path("get-metadata-for-table") @Produces(MediaType.APPLICATION_OCTET_STREAM) byte[] getMetadataForTable(@QueryParam("tableName") String tableName); @POST @Path("get-metadata-for-tables") @Produces(MediaType.APPLICATION_JSON) @Idempotent Map<String, byte[]> getMetadataForTables(); @POST @Path("put-metadata-for-table") @Consumes(MediaType.APPLICATION_OCTET_STREAM) @Idempotent void putMetadataForTable(@QueryParam("tableName") String tableName, byte[] metadata); @POST @Path("put-metadata-for-tables") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void putMetadataForTables(final Map<String, byte[]> tableNameToMetadata); //////////////////////////////////////////////////////////// // METHODS TO SUPPORT GARBAGE COLLECTION //////////////////////////////////////////////////////////// /** * Adds a value with timestamp = Value.INVALID_VALUE_TIMESTAMP to each of the given cells. If * a value already exists at that time stamp, nothing is written for that cell. */ @POST @Path("add-gc-sentinel-values") @Consumes(MediaType.APPLICATION_JSON) @Idempotent void addGarbageCollectionSentinelValues(@QueryParam("tableName") String tableName, Set<Cell> cells); /** * Gets timestamp values from the key-value store. For each cell, this returns all associated * timestamps &lt; given_ts. * <p> * This method has stronger consistency guarantees than regular read requests. This must return * all timestamps stored anywhere in the system. An example of where this could happen is if we * use a system with QUORUM reads and writes. Under normal operations reads only need to talk to * a Quorum of hosts. However this call MUST be implemented by talking to ALL the nodes where a * value could be stored. * * @param tableName the name of the table to delete values from. * @param cells set containg cells to retrieve timestamps for. 
* @param timestamp maximum timestamp to get (exclusive) * @return multimap of timestamps by cell * * @throws InsufficientConsistencyException if not all hosts respond successfully */ @POST @Path("get-all-timestamps") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Idempotent Multimap<Cell, Long> getAllTimestamps(@QueryParam("tableName") String tableName, Set<Cell> cells, @QueryParam("timestamp") long timestamp) throws InsufficientConsistencyException; /** * Does whatever can be done to compact or cleanup a table. Intended to be called after many * deletions are performed. * * This call must be implemented so that it completes synchronously. */ @POST @Path("compact-internally") @Consumes(MediaType.APPLICATION_JSON) void compactInternally(String tableName); }
package org.apache.lucene.util.automaton; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.TreeMap; import java.util.Map; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; // Just holds a set of int[] states, plus a corresponding // int[] count per state. 
// Used by BasicOperations.determinize.
//
// A mutable multiset of state numbers: values[0..upto) holds the distinct
// states in ascending order and counts[i] is the multiplicity of values[i].
// Small sets use linear-scan parallel arrays; once TREE_MAP_CUTOVER distinct
// entries are held, incr/decr switch to a TreeMap instead.
final class SortedIntSet {
  // Distinct state numbers, ascending; only the first `upto` slots are live.
  int[] values;
  // counts[i] is the multiplicity of values[i]; parallel to `values`.
  int[] counts;
  // Number of live entries in values/counts. While useTreeMap is true this is
  // only meaningful after computeHash() rebuilds values[] from the map.
  int upto;
  // Cached hash over the distinct values only (not counts); set by computeHash().
  private int hashCode;

  // If we hold more than this many states, we switch from
  // O(N^2) linear ops to O(N log(N)) TreeMap
  private final static int TREE_MAP_CUTOVER = 30;

  // state -> count; used in place of the arrays once useTreeMap is set.
  private final Map<Integer,Integer> map = new TreeMap<Integer,Integer>();
  private boolean useTreeMap;

  // Never written inside this class — presumably assigned by the
  // determinize caller that owns this scratch set. TODO confirm.
  State state;

  public SortedIntSet(int capacity) {
    values = new int[capacity];
    counts = new int[capacity];
  }

  // Adds this state to the set: bumps the count if present, otherwise inserts
  // it at its sorted position (insertion-sort style shift).
  public void incr(int num) {
    if (useTreeMap) {
      final Integer key = num;
      Integer val = map.get(key);
      if (val == null) {
        map.put(key, 1);
      } else {
        map.put(key, 1+val);
      }
      return;
    }

    if (upto == values.length) {
      values = ArrayUtil.grow(values, 1+upto);
      counts = ArrayUtil.grow(counts, 1+upto);
    }

    for(int i=0;i<upto;i++) {
      if (values[i] == num) {
        counts[i]++;
        return;
      } else if (num < values[i]) {
        // insert here: shift the tail right by one to make room
        int j = upto-1;
        while (j >= i) {
          values[1+j] = values[j];
          counts[1+j] = counts[j];
          j--;
        }
        values[i] = num;
        counts[i] = 1;
        upto++;
        return;
      }
    }

    // append (num is larger than every existing value)
    values[upto] = num;
    counts[upto] = 1;
    upto++;

    // Cut over to the TreeMap once we grow past the linear-scan threshold;
    // copy the current array contents into the map.
    if (upto == TREE_MAP_CUTOVER) {
      useTreeMap = true;
      for(int i=0;i<upto;i++) {
        map.put(values[i], counts[i]);
      }
    }
  }

  // Removes this state from the set, if count decrs to 0.
  // Precondition: num must currently be in the set (asserted below).
  public void decr(int num) {
    if (useTreeMap) {
      final int count = map.get(num);
      if (count == 1) {
        map.remove(num);
      } else {
        map.put(num, count-1);
      }
      // Fall back to simple arrays once we touch zero again
      if (map.size() == 0) {
        useTreeMap = false;
        upto = 0;
      }
      return;
    }
    for(int i=0;i<upto;i++) {
      if (values[i] == num) {
        counts[i]--;
        if (counts[i] == 0) {
          // Remove the entry by shifting the tail left by one.
          final int limit = upto-1;
          while(i < limit) {
            values[i] = values[i+1];
            counts[i] = counts[i+1];
            i++;
          }
          upto = limit;
        }
        return;
      }
    }
    // Reaching here means num was not in the set — caller bug.
    assert false;
  }

  // Recomputes the cached hash over the distinct values. In the TreeMap case
  // this also re-materializes values[0..upto) from the map keys (counts[] is
  // NOT rebuilt) so that freeze()/equals() can work off the array.
  public void computeHash() {
    if (useTreeMap) {
      if (map.size() > values.length) {
        final int size = ArrayUtil.oversize(map.size(), RamUsageEstimator.NUM_BYTES_INT);
        values = new int[size];
        counts = new int[size];
      }
      hashCode = map.size();
      upto = 0;
      // NB: loop variable `state` shadows the State field of the same name.
      for(int state : map.keySet()) {
        hashCode = 683*hashCode + state;
        values[upto++] = state;
      }
    } else {
      hashCode = upto;
      for(int i=0;i<upto;i++) {
        hashCode = 683*hashCode + values[i];
      }
    }
  }

  // Returns an immutable snapshot of the distinct values (multiplicities are
  // deliberately dropped) paired with the given State and the cached hash.
  // Requires computeHash() to have been called first so hashCode/values are current.
  public FrozenIntSet freeze(State state) {
    final int[] c = new int[upto];
    System.arraycopy(values, 0, c, 0, upto);
    return new FrozenIntSet(c, hashCode, state);
  }

  @Override
  public int hashCode() {
    return hashCode;
  }

  // Deliberately asymmetric: a SortedIntSet only ever compares equal to a
  // FrozenIntSet (never to another SortedIntSet) — it is used as a probe key
  // against a map whose stored keys are frozen. Counts are ignored; equality
  // is over the distinct values only.
  @Override
  public boolean equals(Object _other) {
    if (_other == null) {
      return false;
    }
    if (!(_other instanceof FrozenIntSet)) {
      return false;
    }
    FrozenIntSet other = (FrozenIntSet) _other;
    if (hashCode != other.hashCode) {
      return false;
    }
    if (other.values.length != upto) {
      return false;
    }
    for(int i=0;i<upto;i++) {
      if (other.values[i] != values[i]) {
        return false;
      }
    }
    return true;
  }

  // Debug form: "[value:count value:count ...]" over the live entries.
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder().append('[');
    for(int i=0;i<upto;i++) {
      if (i > 0) {
        sb.append(' ');
      }
      sb.append(values[i]).append(':').append(counts[i]);
    }
    sb.append(']');
    return sb.toString();
  }

  // Immutable snapshot of a SortedIntSet's distinct values, with a
  // precomputed hash, suitable for use as a map key.
  public final static class FrozenIntSet {
    final int[] values;
    final int hashCode;
    final State state;

    public FrozenIntSet(int[] values, int hashCode, State state) {
      this.values = values;
      this.hashCode = hashCode;
      this.state = state;
    }

    // Singleton set; 683+num mirrors the seed/multiplier scheme of computeHash()
    // for a one-element set.
    public FrozenIntSet(int num, State state) {
      this.values = new int[] {num};
      this.state = state;
      this.hashCode = 683+num;
    }

    @Override
    public int hashCode() {
      return hashCode;
    }

    // Equal to another FrozenIntSet, or to a SortedIntSet, with the same
    // distinct values (counts ignored).
    @Override
    public boolean equals(Object _other) {
      if (_other == null) {
        return false;
      }
      if (_other instanceof FrozenIntSet) {
        FrozenIntSet other = (FrozenIntSet) _other;
        if (hashCode != other.hashCode) {
          return false;
        }
        if (other.values.length != values.length) {
          return false;
        }
        for(int i=0;i<values.length;i++) {
          if (other.values[i] != values[i]) {
            return false;
          }
        }
        return true;
      } else if (_other instanceof SortedIntSet) {
        SortedIntSet other = (SortedIntSet) _other;
        if (hashCode != other.hashCode) {
          return false;
        }
        // NOTE(review): this compares other.values.length (the array CAPACITY)
        // rather than other.upto (the live count), unlike the symmetric check in
        // SortedIntSet.equals. Looks suspicious if the array carries excess
        // capacity — confirm this comparison direction is never actually taken.
        if (other.values.length != values.length) {
          return false;
        }
        for(int i=0;i<values.length;i++) {
          if (other.values[i] != values[i]) {
            return false;
          }
        }
        return true;
      }
      return false;
    }

    // Debug form: "[value value ...]".
    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder().append('[');
      for(int i=0;i<values.length;i++) {
        if (i > 0) {
          sb.append(' ');
        }
        sb.append(values[i]);
      }
      sb.append(']');
      return sb.toString();
    }
  }
}
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gson.functional; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonDeserializationContext; import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonParseException; import com.google.gson.JsonPrimitive; import com.google.gson.JsonSerializationContext; import com.google.gson.JsonSerializer; import com.google.gson.annotations.SerializedName; import com.google.gson.common.MoreAsserts; import com.google.gson.reflect.TypeToken; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; import java.util.Set; import junit.framework.TestCase; /** * Functional tests for Java 5.0 enums. 
* * @author Inderjeet Singh * @author Joel Leitch */ public class EnumTest extends TestCase { private Gson gson; @Override protected void setUp() throws Exception { super.setUp(); gson = new Gson(); } public void testTopLevelEnumSerialization() throws Exception { String result = gson.toJson(MyEnum.VALUE1); assertEquals('"' + MyEnum.VALUE1.toString() + '"', result); } public void testTopLevelEnumDeserialization() throws Exception { MyEnum result = gson.fromJson('"' + MyEnum.VALUE1.toString() + '"', MyEnum.class); assertEquals(MyEnum.VALUE1, result); } public void testCollectionOfEnumsSerialization() { Type type = new TypeToken<Collection<MyEnum>>() {}.getType(); Collection<MyEnum> target = new ArrayList<MyEnum>(); target.add(MyEnum.VALUE1); target.add(MyEnum.VALUE2); String expectedJson = "[\"VALUE1\",\"VALUE2\"]"; String actualJson = gson.toJson(target); assertEquals(expectedJson, actualJson); actualJson = gson.toJson(target, type); assertEquals(expectedJson, actualJson); } public void testCollectionOfEnumsDeserialization() { Type type = new TypeToken<Collection<MyEnum>>() {}.getType(); String json = "[\"VALUE1\",\"VALUE2\"]"; Collection<MyEnum> target = gson.fromJson(json, type); MoreAsserts.assertContains(target, MyEnum.VALUE1); MoreAsserts.assertContains(target, MyEnum.VALUE2); } public void testClassWithEnumFieldSerialization() throws Exception { ClassWithEnumFields target = new ClassWithEnumFields(); assertEquals(target.getExpectedJson(), gson.toJson(target)); } public void testClassWithEnumFieldDeserialization() throws Exception { String json = "{value1:'VALUE1',value2:'VALUE2'}"; ClassWithEnumFields target = gson.fromJson(json, ClassWithEnumFields.class); assertEquals(MyEnum.VALUE1,target.value1); assertEquals(MyEnum.VALUE2,target.value2); } private static enum MyEnum { VALUE1, VALUE2 } private static class ClassWithEnumFields { private final MyEnum value1 = MyEnum.VALUE1; private final MyEnum value2 = MyEnum.VALUE2; public String getExpectedJson() { return 
"{\"value1\":\"" + value1 + "\",\"value2\":\"" + value2 + "\"}"; } } /** * Test for issue 226. */ public void testEnumSubclass() { assertFalse(Roshambo.class == Roshambo.ROCK.getClass()); assertEquals("\"ROCK\"", gson.toJson(Roshambo.ROCK)); assertEquals("[\"ROCK\",\"PAPER\",\"SCISSORS\"]", gson.toJson(EnumSet.allOf(Roshambo.class))); assertEquals(Roshambo.ROCK, gson.fromJson("\"ROCK\"", Roshambo.class)); assertEquals(EnumSet.allOf(Roshambo.class), gson.fromJson("[\"ROCK\",\"PAPER\",\"SCISSORS\"]", new TypeToken<Set<Roshambo>>() {}.getType())); } public void testEnumSubclassWithRegisteredTypeAdapter() { gson = new GsonBuilder() .registerTypeHierarchyAdapter(Roshambo.class, new MyEnumTypeAdapter()) .create(); assertFalse(Roshambo.class == Roshambo.ROCK.getClass()); assertEquals("\"123ROCK\"", gson.toJson(Roshambo.ROCK)); assertEquals("[\"123ROCK\",\"123PAPER\",\"123SCISSORS\"]", gson.toJson(EnumSet.allOf(Roshambo.class))); assertEquals(Roshambo.ROCK, gson.fromJson("\"123ROCK\"", Roshambo.class)); assertEquals(EnumSet.allOf(Roshambo.class), gson.fromJson("[\"123ROCK\",\"123PAPER\",\"123SCISSORS\"]", new TypeToken<Set<Roshambo>>() {}.getType())); } public void testEnumSubclassAsParameterizedType() { Collection<Roshambo> list = new ArrayList<Roshambo>(); list.add(Roshambo.ROCK); list.add(Roshambo.PAPER); String json = gson.toJson(list); assertEquals("[\"ROCK\",\"PAPER\"]", json); Type collectionType = new TypeToken<Collection<Roshambo>>() {}.getType(); Collection<Roshambo> actualJsonList = gson.fromJson(json, collectionType); MoreAsserts.assertContains(actualJsonList, Roshambo.ROCK); MoreAsserts.assertContains(actualJsonList, Roshambo.PAPER); } public void testEnumCaseMapping() { assertEquals(Gender.MALE, gson.fromJson("\"boy\"", Gender.class)); assertEquals("\"boy\"", gson.toJson(Gender.MALE, Gender.class)); } public void testEnumSet() { EnumSet<Roshambo> foo = EnumSet.of(Roshambo.ROCK, Roshambo.PAPER); String json = gson.toJson(foo); Type type = new 
TypeToken<EnumSet<Roshambo>>() {}.getType(); EnumSet<Roshambo> bar = gson.fromJson(json, type); assertTrue(bar.contains(Roshambo.ROCK)); assertTrue(bar.contains(Roshambo.PAPER)); assertFalse(bar.contains(Roshambo.SCISSORS)); } public enum Roshambo { ROCK { @Override Roshambo defeats() { return SCISSORS; } }, PAPER { @Override Roshambo defeats() { return ROCK; } }, SCISSORS { @Override Roshambo defeats() { return PAPER; } }; abstract Roshambo defeats(); } private static class MyEnumTypeAdapter implements JsonSerializer<Roshambo>, JsonDeserializer<Roshambo> { @Override public JsonElement serialize(Roshambo src, Type typeOfSrc, JsonSerializationContext context) { return new JsonPrimitive("123" + src.name()); } @Override public Roshambo deserialize(JsonElement json, Type classOfT, JsonDeserializationContext context) throws JsonParseException { return Roshambo.valueOf(json.getAsString().substring(3)); } } public enum Gender { @SerializedName("boy") MALE, @SerializedName("girl") FEMALE } }
/* * @(#)TypeSurrogateSelectorImpl.java 1.0 * * Created on September 18, 2008, 12:59 PM * * Copyright 2008 NeXtreme Innovations, Inc. All rights reserved. * "NeXtreme Innovations" PROPRIETARY/CONFIDENTIAL. Use is subject * to license terms. */ package com.alachisoft.tayzgrid.serialization.standard.io; import com.alachisoft.tayzgrid.serialization.core.io.TypeSurrogateConstants; import com.alachisoft.tayzgrid.serialization.core.io.TypeSurrogateSelectorBase; import com.alachisoft.tayzgrid.serialization.core.io.surrogates.CacheArgumentException; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.AverageResultSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.BigDecimalSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.BigIntegerSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.BooleanArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.BooleanSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ByteArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ByteSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.CharArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.CharSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.CollectionSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.DateArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.DateSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.DoubleArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.DoubleSerializationSurrogate; import 
com.alachisoft.tayzgrid.serialization.standard.io.surrogates.EOFDotNetSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.EOFJavaSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.FloatArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.FloatSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.GenericArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.IntegerArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.IntegerSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.LongArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.LongSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.MapSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.NullSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ObjectArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ObjectSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ShortArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ShortSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.SkipSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.SqlDateSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.SqlTimeSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.SqlTimestampSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.StackTraceElementSerializationSurrogate; 
import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.StringArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.StringSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.ThrowableSerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt16ArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt16SerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt32ArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt32SerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt64ArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UInt64SerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UnsignedByteArraySerializationSurrogate; import com.alachisoft.tayzgrid.serialization.standard.io.surrogates.UnsignedByteSerializationSurrogate; import java.math.BigInteger; import java.util.ArrayList; import java.util.HashMap; import java.util.Hashtable; import java.util.LinkedList; /** * TypeSurrogateSelectorImpl class. * * @version 1.0, September 18, 2008 */ public class TypeSurrogateSelectorImpl extends TypeSurrogateSelectorBase { /** * Minimum id that can be assigned to a user defined type. 
*/ private static TypeSurrogateSelectorImpl msDefault; /** * Creates a new instance of TypeSurrogateSelectorBase */ public TypeSurrogateSelectorImpl() { construct(true); } /** * Creates a new instance of TypeSurrogateSelectorBase */ public TypeSurrogateSelectorImpl(boolean useBuiltinSurrogates) { construct(useBuiltinSurrogates); } /*** */ private void construct(boolean useBuiltinSurrogates) { mNullSurrogate = new NullSerializationSurrogate(); mDefaultSurrogate = new ObjectSerializationSurrogate(Object.class); mDefaultArraySurrogate = new ObjectArraySerializationSurrogate(); try { this.register(mNullSurrogate, NullSerializationSurrogate.HARD_HANDLE); this.register(mDefaultSurrogate, ObjectSerializationSurrogate.HARD_HANDLE); this.register(mDefaultArraySurrogate, ObjectArraySerializationSurrogate.HARD_HANDLE); if (useBuiltinSurrogates) { registerBuiltinSurrogates(); } } catch (CacheArgumentException ex) { } } /* * Returns the default type surrogate selector Object */ public static TypeSurrogateSelectorImpl getDefault() { if(TypeSurrogateSelectorImpl.msDefault == null) TypeSurrogateSelectorImpl.msDefault = new TypeSurrogateSelectorImpl(); return TypeSurrogateSelectorImpl.msDefault; } /* * Returns the default type surrogate selector Object */ public static void setDefault(TypeSurrogateSelectorImpl value) { TypeSurrogateSelectorImpl.msDefault = value; } /** * Unregisters all surrogates, except null and default ones. */ public void clear() { super.clear(); try { this.register(mNullSurrogate, NullSerializationSurrogate.HARD_HANDLE); this.register(mDefaultSurrogate, ObjectSerializationSurrogate.HARD_HANDLE); this.register(mDefaultArraySurrogate, ObjectSerializationSurrogate.HARD_HANDLE); } catch (CacheArgumentException ex) { } } /** * Registers built-in surrogates with the system. 
*/
public synchronized void registerBuiltinSurrogates() throws CacheArgumentException
{
    // Handles are handed out sequentially inside fixed bands relative to
    // TypeSurrogateConstants.FirstTypeHandle: +4 for primitives/arrays/
    // collections, +400 for throwables, +600 for boxed-type arrays, +900 for
    // SQL/time and big numbers, and a fixed 2000 for Hashtable. The sequence
    // presumably mirrors the .NET client's type map, so entries must not be
    // reordered or removed — TODO confirm against the .NET side.
    //
    // FIX: removed the dead store `typeHandle = FirstTypeHandle + 100` that was
    // immediately overwritten by `FirstTypeHandle + 4` (behavior unchanged).
    short typeHandle = TypeSurrogateConstants.FirstTypeHandle + 4;

    // Primitives, strings and dates.
    register(new BooleanSerializationSurrogate(), typeHandle++);
    register(new UnsignedByteSerializationSurrogate(), typeHandle++);
    register(new CharSerializationSurrogate(), typeHandle++);
    register(new FloatSerializationSurrogate(), typeHandle++);
    register(new DoubleSerializationSurrogate(), typeHandle++);
    register(new BigDecimalSerializationSurrogate(), typeHandle++);
    register(new ShortSerializationSurrogate(), typeHandle++);
    register(new IntegerSerializationSurrogate(), typeHandle++);
    register(new LongSerializationSurrogate(), typeHandle++);
    register(new StringSerializationSurrogate(), typeHandle++);
    register(new DateSerializationSurrogate(), typeHandle++);
    // NullSerializationSurrogate registrations below appear to be slot fillers
    // that keep the handle sequence aligned with the remote type map —
    // presumably reserved slots; TODO confirm before reusing any of them.
    register(new NullSerializationSurrogate(), typeHandle++);

    // Primitive arrays.
    register(new BooleanArraySerializationSurrogate(), typeHandle++);
    register(new UnsignedByteArraySerializationSurrogate(), typeHandle++);
    register(new CharArraySerializationSurrogate(), typeHandle++);
    register(new FloatArraySerializationSurrogate(), typeHandle++);
    register(new DoubleArraySerializationSurrogate(), typeHandle++);
    register(new ShortArraySerializationSurrogate(), typeHandle++);
    register(new IntegerArraySerializationSurrogate(), typeHandle++);
    register(new LongArraySerializationSurrogate(), typeHandle++);
    register(new StringArraySerializationSurrogate(), typeHandle++);
    register(new AverageResultSerializationSurrogate(), typeHandle++);

    //End of File for Java: To be provided by .Net Client
    register(new EOFJavaSerializationSurrogate(), typeHandle++);
    //End of File for Java: To be provided by Java Client
    register(new EOFDotNetSerializationSurrogate(), typeHandle++);
    //Skip this value Surrogate
    register(new SkipSerializationSurrogate(), typeHandle++);
    register(new NullSerializationSurrogate(), typeHandle++);
    register(new DateArraySerializationSurrogate(), typeHandle++);
    register(new NullSerializationSurrogate(), typeHandle++);
    register(new ByteArraySerializationSurrogate(), typeHandle++);
    register(new UInt16ArraySerializationSurrogate(), typeHandle++);
    register(new UInt32ArraySerializationSurrogate(), typeHandle++);
    register(new UInt64ArraySerializationSurrogate(), typeHandle++);
    register(new NullSerializationSurrogate(), typeHandle++);
    register(new ByteSerializationSurrogate(), typeHandle++);
    register(new UInt16SerializationSurrogate(), typeHandle++);
    register(new UInt32SerializationSurrogate(), typeHandle++);
    register(new UInt64SerializationSurrogate(), typeHandle++);
    register(new NullSerializationSurrogate(), typeHandle++);

    // Collections and maps (anonymous subclasses preserved from the original code).
    register(new CollectionSerializationSurrogate(ArrayList.class)
    {
    }, typeHandle++);
    register(new NullSerializationSurrogate(), typeHandle++);
    register(new MapSerializationSurrogate(HashMap.class)
    {
    }, typeHandle++);
    register(new GenericArraySerializationSurrogate<BigInteger>(BigInteger[].class), typeHandle++);
    register(new CollectionSerializationSurrogate(LinkedList.class)
    {
    }, typeHandle++);

    // Throwables and stack traces.
    typeHandle = TypeSurrogateConstants.FirstTypeHandle + 400;
    register(new ThrowableSerializationSurrogate(Exception.class), typeHandle++);
    register(new ThrowableSerializationSurrogate(Error.class), typeHandle++);
    register(new StackTraceElementSerializationSurrogate(), typeHandle++);
    register(new GenericArraySerializationSurrogate<StackTraceElement>(StackTraceElement[].class), typeHandle++);

    // Boxed-type arrays.
    typeHandle = TypeSurrogateConstants.FirstTypeHandle + 600;
    register(new GenericArraySerializationSurrogate<Boolean>(Boolean[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Byte>(Byte[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Character>(Character[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Float>(Float[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Double>(Double[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Short>(Short[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Integer>(Integer[].class), typeHandle++);
    register(new GenericArraySerializationSurrogate<Long>(Long[].class), typeHandle++);

    // SQL date/time and big numbers.
    typeHandle = TypeSurrogateConstants.FirstTypeHandle + 900;
    register(new SqlDateSerializationSurrogate(), typeHandle++);
    register(new SqlTimeSerializationSurrogate(), typeHandle++);
    register(new SqlTimestampSerializationSurrogate(), typeHandle++);
    register(new BigIntegerSerializationSurrogate(), typeHandle++);

    // Hashtable gets a fixed, out-of-band handle.
    register(new MapSerializationSurrogate(Hashtable.class)
    {
    }, (short) 2000);
}
}
/* Generated by camel build tools - do NOT edit this file! */ package org.apache.camel.component.hdfs; import java.util.Map; import org.apache.camel.CamelContext; import org.apache.camel.spi.GeneratedPropertyConfigurer; import org.apache.camel.spi.PropertyConfigurerGetter; import org.apache.camel.util.CaseInsensitiveMap; import org.apache.camel.support.component.PropertyConfigurerSupport; /** * Generated by camel build tools - do NOT edit this file! */ @SuppressWarnings("unchecked") public class HdfsEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { HdfsEndpoint target = (HdfsEndpoint) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "append": target.getConfig().setAppend(property(camelContext, boolean.class, value)); return true; case "backofferrorthreshold": case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true; case "backoffidlethreshold": case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true; case "backoffmultiplier": case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true; case "basicpropertybinding": case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true; case "blocksize": case "blockSize": target.getConfig().setBlockSize(property(camelContext, long.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "buffersize": case "bufferSize": target.getConfig().setBufferSize(property(camelContext, int.class, value)); return true; case "checkidleinterval": case "checkIdleInterval": 
// ---------------------------------------------------------------------------
// NOTE(review): machine-generated Apache Camel endpoint property-configurer
// code for HdfsEndpoint (class header and the start of the configure(...)
// switch are above this excerpt). Do not hand-edit the logic: each option is
// matched by both its all-lowercase and camelCase spelling, and property(...)
// converts the raw value to the declared type before calling the setter.
// ---------------------------------------------------------------------------
        target.getConfig().setCheckIdleInterval(property(camelContext, int.class, value)); return true;
        case "chunksize": case "chunkSize": target.getConfig().setChunkSize(property(camelContext, int.class, value)); return true;
        case "compressioncodec": case "compressionCodec": target.getConfig().setCompressionCodec(property(camelContext, org.apache.camel.component.hdfs.HdfsCompressionCodec.class, value)); return true;
        case "compressiontype": case "compressionType": target.getConfig().setCompressionType(property(camelContext, org.apache.hadoop.io.SequenceFile.CompressionType.class, value)); return true;
        case "connectonstartup": case "connectOnStartup": target.getConfig().setConnectOnStartup(property(camelContext, boolean.class, value)); return true;
        case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
        case "exceptionhandler": case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "exchangepattern": case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
        case "filesystemtype": case "fileSystemType": target.getConfig().setFileSystemType(property(camelContext, org.apache.camel.component.hdfs.HdfsFileSystemType.class, value)); return true;
        case "filetype": case "fileType": target.getConfig().setFileType(property(camelContext, org.apache.camel.component.hdfs.HdfsFileType.class, value)); return true;
        case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true;
        case "initialdelay": case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true;
        case "kerberosconfigfilelocation": case "kerberosConfigFileLocation": target.getConfig().setKerberosConfigFileLocation(property(camelContext, java.lang.String.class, value)); return true;
        case "kerberoskeytablocation": case "kerberosKeytabLocation": target.getConfig().setKerberosKeytabLocation(property(camelContext, java.lang.String.class, value)); return true;
        case "kerberosusername": case "kerberosUsername": target.getConfig().setKerberosUsername(property(camelContext, java.lang.String.class, value)); return true;
        case "keytype": case "keyType": target.getConfig().setKeyType(property(camelContext, org.apache.camel.component.hdfs.WritableType.class, value)); return true;
        case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "maxmessagesperpoll": case "maxMessagesPerPoll": target.getConfig().setMaxMessagesPerPoll(property(camelContext, int.class, value)); return true;
        case "namednodes": case "namedNodes": target.getConfig().setNamedNodes(property(camelContext, java.lang.String.class, value)); return true;
        case "openedsuffix": case "openedSuffix": target.getConfig().setOpenedSuffix(property(camelContext, java.lang.String.class, value)); return true;
        case "overwrite": target.getConfig().setOverwrite(property(camelContext, boolean.class, value)); return true;
        case "owner": target.getConfig().setOwner(property(camelContext, java.lang.String.class, value)); return true;
        case "pattern": target.getConfig().setPattern(property(camelContext, java.lang.String.class, value)); return true;
        case "pollstrategy": case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true;
        case "readsuffix": case "readSuffix": target.getConfig().setReadSuffix(property(camelContext, java.lang.String.class, value)); return true;
        case "repeatcount": case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true;
        case "replication": target.getConfig().setReplication(property(camelContext, short.class, value)); return true;
        case "runlogginglevel": case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
        case "scheduledexecutorservice": case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
        case "scheduler": target.setScheduler(property(camelContext, java.lang.String.class, value)); return true;
        case "schedulerproperties": case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true;
        case "sendemptymessagewhenidle": case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true;
        case "splitstrategy": case "splitStrategy": target.getConfig().setSplitStrategy(property(camelContext, java.lang.String.class, value)); return true;
        case "startscheduler": case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true;
        case "streamdownload": case "streamDownload": target.getConfig().setStreamDownload(property(camelContext, boolean.class, value)); return true;
        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
        case "timeunit": case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true;
        case "usefixeddelay": case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true;
        case "valuetype": case "valueType": target.getConfig().setValueType(property(camelContext, org.apache.camel.component.hdfs.WritableType.class, value)); return true;
        // Unknown option name: report "not configured" to the caller.
        default: return false;
        }
    }

    /**
     * Returns every configurable option name mapped to its declared type.
     * NOTE(review): CaseInsensitiveMap is used raw here — generated code;
     * keys are looked up case-insensitively by the Camel runtime.
     */
    @Override
    public Map<String, Object> getAllOptions(Object target) {
        Map<String, Object> answer = new CaseInsensitiveMap();
        answer.put("append", boolean.class);
        answer.put("backoffErrorThreshold", int.class);
        answer.put("backoffIdleThreshold", int.class);
        answer.put("backoffMultiplier", int.class);
        answer.put("basicPropertyBinding", boolean.class);
        answer.put("blockSize", long.class);
        answer.put("bridgeErrorHandler", boolean.class);
        answer.put("bufferSize", int.class);
        answer.put("checkIdleInterval", int.class);
        answer.put("chunkSize", int.class);
        answer.put("compressionCodec", org.apache.camel.component.hdfs.HdfsCompressionCodec.class);
        answer.put("compressionType", org.apache.hadoop.io.SequenceFile.CompressionType.class);
        answer.put("connectOnStartup", boolean.class);
        answer.put("delay", long.class);
        answer.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
        answer.put("exchangePattern", org.apache.camel.ExchangePattern.class);
        answer.put("fileSystemType", org.apache.camel.component.hdfs.HdfsFileSystemType.class);
        answer.put("fileType", org.apache.camel.component.hdfs.HdfsFileType.class);
        answer.put("greedy", boolean.class);
        answer.put("initialDelay", long.class);
        answer.put("kerberosConfigFileLocation", java.lang.String.class);
        answer.put("kerberosKeytabLocation", java.lang.String.class);
        answer.put("kerberosUsername", java.lang.String.class);
        answer.put("keyType", org.apache.camel.component.hdfs.WritableType.class);
        answer.put("lazyStartProducer", boolean.class);
        answer.put("maxMessagesPerPoll", int.class);
        answer.put("namedNodes", java.lang.String.class);
        answer.put("openedSuffix", java.lang.String.class);
        answer.put("overwrite", boolean.class);
        answer.put("owner", java.lang.String.class);
        answer.put("pattern", java.lang.String.class);
        answer.put("pollStrategy", org.apache.camel.spi.PollingConsumerPollStrategy.class);
        answer.put("readSuffix", java.lang.String.class);
        answer.put("repeatCount", long.class);
        answer.put("replication", short.class);
        answer.put("runLoggingLevel", org.apache.camel.LoggingLevel.class);
        answer.put("scheduledExecutorService", java.util.concurrent.ScheduledExecutorService.class);
        answer.put("scheduler", java.lang.String.class);
        answer.put("schedulerProperties", java.util.Map.class);
        answer.put("sendEmptyMessageWhenIdle", boolean.class);
        answer.put("splitStrategy", java.lang.String.class);
        answer.put("startScheduler", boolean.class);
        answer.put("streamDownload", boolean.class);
        answer.put("synchronous", boolean.class);
        answer.put("timeUnit", java.util.concurrent.TimeUnit.class);
        answer.put("useFixedDelay", boolean.class);
        answer.put("valueType", org.apache.camel.component.hdfs.WritableType.class);
        return answer;
    }

    /**
     * Reflection-free getter dispatch: returns the current value of the named
     * option, or null when the name is not recognised. Mirrors the setter
     * switch above (both lowercase and camelCase spellings accepted).
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        HdfsEndpoint target = (HdfsEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "append": return target.getConfig().isAppend();
        case "backofferrorthreshold": case "backoffErrorThreshold": return target.getBackoffErrorThreshold();
        case "backoffidlethreshold": case "backoffIdleThreshold": return target.getBackoffIdleThreshold();
        case "backoffmultiplier": case "backoffMultiplier": return target.getBackoffMultiplier();
        case "basicpropertybinding": case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "blocksize": case "blockSize": return target.getConfig().getBlockSize();
        case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "buffersize": case "bufferSize": return target.getConfig().getBufferSize();
        case "checkidleinterval": case "checkIdleInterval": return target.getConfig().getCheckIdleInterval();
        case "chunksize": case "chunkSize": return target.getConfig().getChunkSize();
        case "compressioncodec": case "compressionCodec": return target.getConfig().getCompressionCodec();
        case "compressiontype": case "compressionType": return target.getConfig().getCompressionType();
        case "connectonstartup": case "connectOnStartup": return target.getConfig().isConnectOnStartup();
        case "delay": return target.getDelay();
        case "exceptionhandler": case "exceptionHandler": return target.getExceptionHandler();
        case "exchangepattern": case "exchangePattern": return target.getExchangePattern();
        case "filesystemtype": case "fileSystemType": return target.getConfig().getFileSystemType();
        case "filetype": case "fileType": return target.getConfig().getFileType();
        case "greedy": return target.isGreedy();
        case "initialdelay": case "initialDelay": return target.getInitialDelay();
        case "kerberosconfigfilelocation": case "kerberosConfigFileLocation": return target.getConfig().getKerberosConfigFileLocation();
        case "kerberoskeytablocation": case "kerberosKeytabLocation": return target.getConfig().getKerberosKeytabLocation();
        case "kerberosusername": case "kerberosUsername": return target.getConfig().getKerberosUsername();
        case "keytype": case "keyType": return target.getConfig().getKeyType();
        case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer();
        case "maxmessagesperpoll": case "maxMessagesPerPoll": return target.getConfig().getMaxMessagesPerPoll();
        case "namednodes": case "namedNodes": return target.getConfig().getNamedNodes();
        case "openedsuffix": case "openedSuffix": return target.getConfig().getOpenedSuffix();
        case "overwrite": return target.getConfig().isOverwrite();
        case "owner": return target.getConfig().getOwner();
        case "pattern": return target.getConfig().getPattern();
        case "pollstrategy": case "pollStrategy": return target.getPollStrategy();
        case "readsuffix": case "readSuffix": return target.getConfig().getReadSuffix();
        case "repeatcount": case "repeatCount": return target.getRepeatCount();
        case "replication": return target.getConfig().getReplication();
        case "runlogginglevel": case "runLoggingLevel": return target.getRunLoggingLevel();
        case "scheduledexecutorservice": case "scheduledExecutorService": return target.getScheduledExecutorService();
        case "scheduler": return target.getScheduler();
        case "schedulerproperties": case "schedulerProperties": return target.getSchedulerProperties();
        case "sendemptymessagewhenidle": case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle();
        case "splitstrategy": case "splitStrategy": return target.getConfig().getSplitStrategy();
        case "startscheduler": case "startScheduler": return target.isStartScheduler();
        case "streamdownload": case "streamDownload": return target.getConfig().isStreamDownload();
        case "synchronous": return target.isSynchronous();
        case "timeunit": case "timeUnit": return target.getTimeUnit();
        case "usefixeddelay": case "useFixedDelay": return target.isUseFixedDelay();
        case "valuetype": case "valueType": return target.getConfig().getValueType();
        default: return null;
        }
    }
}
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.content.browser;

import android.content.Context;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Looper;
import android.util.Log;

import org.chromium.base.ActivityStatus;
import org.chromium.base.CalledByNative;
import org.chromium.base.ThreadUtils;

import java.util.List;
import java.util.concurrent.FutureTask;

/**
 * Implements the Java side of LocationProviderAndroid.
 * Delegates all real functionality to the inner class.
 * See detailed documentation on
 * content/browser/geolocation/android_location_api_adapter.h.
 * Based on android.webkit.GeolocationService.java
 */
class LocationProvider {

    // Log tag
    private static final String TAG = "LocationProvider";

    /**
     * This is the core of android location provider. It is a separate class for clarity
     * so that it can manage all processing completely in the UI thread. The container class
     * ensures that the start/stop calls into this class are done in the UI thread.
     */
    private static class LocationProviderImpl
            implements LocationListener, ActivityStatus.StateListener {

        private Context mContext;
        private LocationManager mLocationManager;
        // True while this listener is registered with the LocationManager.
        private boolean mIsRunning;
        // Set on PAUSE so that registration can be resumed on RESUME.
        private boolean mShouldRunAfterActivityResume;
        private boolean mIsGpsEnabled;

        LocationProviderImpl(Context context) {
            mContext = context;
        }

        /**
         * Suspends location updates while the activity is paused, and restores
         * them on resume if they were running before the pause.
         */
        @Override
        public void onActivityStateChange(int state) {
            if (state == ActivityStatus.PAUSED) {
                mShouldRunAfterActivityResume = mIsRunning;
                unregisterFromLocationUpdates();
            } else if (state == ActivityStatus.RESUMED) {
                assert !mIsRunning;
                if (mShouldRunAfterActivityResume) {
                    registerForLocationUpdates();
                }
            }
        }

        /**
         * Start listening for location updates.
         * Called on the UI thread (the container class posts here via FutureTask).
         * If the activity is currently paused, registration is deferred until resume.
         * @param gpsEnabled Whether or not we're interested in high accuracy GPS.
         */
        private void start(boolean gpsEnabled) {
            if (!mIsRunning && !mShouldRunAfterActivityResume) {
                // Currently idle so start listening to activity status changes.
                ActivityStatus.registerStateListener(this);
            }
            mIsGpsEnabled = gpsEnabled;
            if (ActivityStatus.isPaused()) {
                mShouldRunAfterActivityResume = true;
            } else {
                // Re-register so a change in mIsGpsEnabled takes effect immediately.
                unregisterFromLocationUpdates();
                registerForLocationUpdates();
            }
        }

        /**
         * Stop listening for location updates.
         */
        private void stop() {
            unregisterFromLocationUpdates();
            ActivityStatus.unregisterStateListener(this);
            mShouldRunAfterActivityResume = false;
        }

        /**
         * Returns true if we are currently listening for location updates, false if not.
         */
        private boolean isRunning() {
            return mIsRunning;
        }

        @Override
        public void onLocationChanged(Location location) {
            // Callbacks from the system location service are queued to this thread, so it's
            // possible that we receive callbacks after unregistering. At this point, the
            // native object will no longer exist.
            if (mIsRunning) {
                updateNewLocation(location);
            }
        }

        // Forwards a fix to the native side; getTime() is milliseconds, hence / 1000.0.
        private void updateNewLocation(Location location) {
            nativeNewLocationAvailable(location.getLatitude(), location.getLongitude(),
                    location.getTime() / 1000.0,
                    location.hasAltitude(), location.getAltitude(),
                    location.hasAccuracy(), location.getAccuracy(),
                    location.hasBearing(), location.getBearing(),
                    location.hasSpeed(), location.getSpeed());
        }

        @Override
        public void onStatusChanged(String provider, int status, Bundle extras) {
        }

        @Override
        public void onProviderEnabled(String provider) {
        }

        @Override
        public void onProviderDisabled(String provider) {
        }

        // Lazily looks up the system LocationManager; logs on failure.
        private void ensureLocationManagerCreated() {
            if (mLocationManager != null) return;
            mLocationManager = (LocationManager) mContext.getSystemService(
                    Context.LOCATION_SERVICE);
            if (mLocationManager == null) {
                Log.e(TAG, "Could not get location manager.");
            }
        }

        /**
         * Registers this object with the location service.
         * Requests default-criteria updates, plus a second fine-accuracy request
         * when GPS is enabled; both are delivered on the UI thread looper.
         */
        private void registerForLocationUpdates() {
            ensureLocationManagerCreated();
            if (usePassiveOneShotLocation()) return;

            assert !mIsRunning;
            mIsRunning = true;

            // We're running on the main thread. The C++ side is responsible to
            // bounce notifications to the Geolocation thread as they arrive in the mainLooper.
            try {
                Criteria criteria = new Criteria();
                mLocationManager.requestLocationUpdates(0, 0, criteria, this,
                        ThreadUtils.getUiThreadLooper());
                if (mIsGpsEnabled) {
                    criteria.setAccuracy(Criteria.ACCURACY_FINE);
                    mLocationManager.requestLocationUpdates(0, 0, criteria, this,
                            ThreadUtils.getUiThreadLooper());
                }
            } catch(SecurityException e) {
                Log.e(TAG, "Caught security exception registering for location updates from " +
                    "system. This should only happen in DumpRenderTree.");
            } catch(IllegalArgumentException e) {
                Log.e(TAG, "Caught IllegalArgumentException registering for location updates.");
            }
        }

        /**
         * Unregisters this object from the location service.
         */
        private void unregisterFromLocationUpdates() {
            if (mIsRunning) {
                mIsRunning = false;
                mLocationManager.removeUpdates(this);
            }
        }

        // One-shot fallback used when only the passive provider is available:
        // replays the last known location instead of registering for updates.
        private boolean usePassiveOneShotLocation() {
            if (!isOnlyPassiveLocationProviderEnabled()) return false;

            // Do not request a location update if the only available location provider is
            // the passive one. Make use of the last known location and call
            // onLocationChanged directly.
            final Location location = mLocationManager.getLastKnownLocation(
                    LocationManager.PASSIVE_PROVIDER);
            if (location != null) {
                ThreadUtils.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        updateNewLocation(location);
                    }
                });
            }
            return true;
        }

        /*
         * Checks if the passive location provider is the only provider available
         * in the system.
         */
        private boolean isOnlyPassiveLocationProviderEnabled() {
            List<String> providers = mLocationManager.getProviders(true);
            return providers != null && providers.size() == 1
                    && providers.get(0).equals(LocationManager.PASSIVE_PROVIDER);
        }
    }

    // Delegate handling the real work in the main thread.
    private LocationProviderImpl mImpl;

    private LocationProvider(Context context) {
        mImpl = new LocationProviderImpl(context);
    }

    @CalledByNative
    static LocationProvider create(Context context) {
        return new LocationProvider(context);
    }

    /**
     * Start listening for location updates until we're told to quit. May be
     * called in any thread.
     * @param gpsEnabled Whether or not we're interested in high accuracy GPS.
     */
    @CalledByNative
    public boolean start(final boolean gpsEnabled) {
        FutureTask<Void> task = new FutureTask<Void>(new Runnable() {
            @Override
            public void run() {
                mImpl.start(gpsEnabled);
            }
        }, null);
        ThreadUtils.runOnUiThread(task);
        return true;
    }

    /**
     * Stop listening for location updates. May be called in any thread.
     */
    @CalledByNative
    public void stop() {
        FutureTask<Void> task = new FutureTask<Void>(new Runnable() {
            @Override
            public void run() {
                mImpl.stop();
            }
        }, null);
        ThreadUtils.runOnUiThread(task);
    }

    /**
     * Returns true if we are currently listening for location updates, false if not.
     * Must be called only in the UI thread.
     */
    public boolean isRunning() {
        assert ThreadUtils.runningOnUiThread();
        return mImpl.isRunning();
    }

    // Native functions
    public static native void nativeNewLocationAvailable(
            double latitude, double longitude, double timeStamp,
            boolean hasAltitude, double altitude,
            boolean hasAccuracy, double accuracy,
            boolean hasHeading, double heading,
            boolean hasSpeed, double speed);
    public static native void nativeNewErrorAvailable(String message);
}
package org.cohorte.eclipse.runner.basic; import org.apache.felix.ipojo.annotations.Component; import org.apache.felix.ipojo.annotations.Instantiate; import org.apache.felix.ipojo.annotations.Invalidate; import org.apache.felix.ipojo.annotations.Provides; import org.apache.felix.ipojo.annotations.Requires; import org.apache.felix.ipojo.annotations.ServiceProperty; import org.apache.felix.ipojo.annotations.Validate; import org.cohorte.herald.HeraldException; import org.cohorte.herald.IHerald; import org.cohorte.herald.IMessageListener; import org.cohorte.herald.Message; import org.cohorte.herald.MessageReceived; import org.cohorte.herald.NoTransport; import org.cohorte.remote.IRemoteServicesConstants; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleException; import org.osgi.service.event.Event; import org.osgi.service.event.EventConstants; import org.osgi.service.event.EventHandler; import org.psem2m.isolates.base.IIsolateLoggerSvc; import org.psem2m.utilities.CXStringUtils; /** * Gogo command to stop all Isolate on Eclipse developpement environement. 
* * MOD_OG_20170418 add event handler to allow the programmatic usage of the * shutdown command in the current isolate as the herald messages are not * delivred to their emmiter * * @author bdebbabi * @author ogattaz * */ @Component @Instantiate @Provides(specifications = { CShutdownGogoCommand.class, IMessageListener.class, EventHandler.class }) public class CShutdownGogoCommand implements IConstants, IMessageListener, EventHandler { /** * OSGi Bundle Context */ BundleContext pBundleContext; /** The Gogo commands */ @ServiceProperty(name = "osgi.command.function", value = "{shutdown}") private String[] pCommands; /** Herald message filters */ @ServiceProperty(name = org.cohorte.herald.IConstants.PROP_FILTERS, value = "{" + RUNNER_BASIC_SHUTDOWN_MESSAGE + "}") private String[] pFilters; @Requires private IHerald pHerald; /** * Cohorte Logger service */ @Requires private IIsolateLoggerSvc pLogger; /** * MOD_OG_20170418 The "pelix.remote.export.reject" property to limit the * remote export of the services */ @ServiceProperty(name = IRemoteServicesConstants.PROP_EXPORT_REJECT, immutable = true) private final String[] pNotRemote = { IMessageListener.class.getName(), EventHandler.class.getName() }; /** * The Gogo commands scope */ @ServiceProperty(name = "osgi.command.scope", value = "runner") private String pScope; // MOD_OG_20170418 an array ! @ServiceProperty(name = EventConstants.EVENT_TOPIC, value = "{" + RUNNER_BASIC_SHUTDOWN_TOPIC + "}") private String[] pTopics; /** * Constructor * * @param aBundleContext */ public CShutdownGogoCommand(final BundleContext aBundleContext) { pBundleContext = aBundleContext; // System.out.printf("devtool-basic-runner: %50s | instanciated \n", // this.getClass().getName()); } /** * Stops bundle 0 (OSGi framework). 
*/ private void doShutdown() { pLogger.logInfo(this, "doShutdown", "trying to stop bundle 0 after 2 second..."); // stop actual isolate try { // wait the broadcast message to be sent Thread.sleep(2000); } catch (final InterruptedException e) { e.printStackTrace(); } try { pBundleContext.getBundle(0).stop(); pLogger.logInfo(this, "doShutdown", "bundle 0 stopped"); } catch (final BundleException e) { e.printStackTrace(); } } /** * MOD_OG_20170418 * * @see org.osgi.service.event.EventHandler#handleEvent(org.osgi.service.event * .Event) */ @Override public void handleEvent(Event aEvent) { pLogger.logInfo(this, "handleEvent", "receiving a event..."); try { String wEventTopic = aEvent.getTopic(); pLogger.logInfo(this, "handleEvent", "Event: %s", aEvent); if (RUNNER_BASIC_SHUTDOWN_TOPIC.equalsIgnoreCase(wEventTopic)) { doShutdown(); } } catch (Exception e) { if (pLogger == null) { e.printStackTrace(); } // else { pLogger.logSevere(this, "handleEvent", "ERROR: %s", e); } } } /* * (non-Javadoc) * * @see * org.cohorte.herald.IMessageListener#heraldMessage(org.cohorte.herald. * IHerald, org.cohorte.herald.MessageReceived) */ @Override public void heraldMessage(final IHerald aHerald, final MessageReceived aMessage) throws HeraldException { pLogger.logInfo(this, "heraldMessage", "receiving a Herald message..."); final String wSubject = aMessage.getSubject(); pLogger.logInfo(this, "heraldMessage", "subject=" + "wSubject"); if (RUNNER_BASIC_SHUTDOWN_MESSAGE.equalsIgnoreCase(wSubject)) { doShutdown(); } } /** * MOD_OG_20160905 Log traces enhancement */ @Invalidate public void invalidate() { pLogger.logInfo(this, "invalidate", "invalidated"); } /** * Shutdown Gogo command. 
*/ public void shutdown() { pLogger.logInfo(this, "shutdown", "Fire SHUTDOWN message to all isolates"); try { pHerald.fireGroup("all", new Message(RUNNER_BASIC_SHUTDOWN_MESSAGE)); } catch (final NoTransport e) { e.printStackTrace(); } // as the method fireGroup send the message to the other but not at the // current isolate ! doShutdown(); } /** * MOD_OG_20160905 Log traces enhancement */ @Validate public void validate() { pLogger.logInfo(this, "validate", "Validating..."); pLogger.logInfo(this, "validate", "Commands=[%s]", CXStringUtils.stringTableToString(pCommands)); pLogger.logInfo(this, "validate", "Validated"); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package java.nio.charset;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.spi.CharsetProvider;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

import com.ibm.icu4jni.charset.CharsetProviderICU;

/**
 * A charset defines a mapping between a Unicode character sequence and a byte
 * sequence. It facilitates the encoding from a Unicode character sequence into
 * a byte sequence, and the decoding from a byte sequence into a Unicode
 * character sequence.
 * <p>
 * A charset has a canonical name, which is usually in uppercase. Typically it
 * also has one or more aliases. The name string can only consist of the
 * following characters: '0' - '9', 'A' - 'Z', 'a' - 'z', '.', ':'. '-' and '_'.
 * The first character of the name must be a digit or a letter.
 * </p>
 * <p>
 * The following charsets should be supported by any java platforms: US-ASCII,
 * ISO-8859-1, UTF-8, UTF-16BE, UTF-16LE, UTF-16.
 * </p>
 * <p>
 * Additional charsets can be made available by configuring one or more charset
 * providers through provider configuration files. Such files are always named
 * as "java.nio.charset.spi.CharsetProvider" and located in the
 * "META-INF/services" sub folder of one or more classpaths. The files should be
 * encoded in "UTF-8". Each line of their content specifies the class name of a
 * charset provider which extends <code>java.nio.spi.CharsetProvider</code>.
 * A line should end with '\r', '\n' or '\r\n'. Leading and trailing
 * whitespace is trimmed. Blank lines, and lines (after trimming) starting with
 * "#" which are regarded as comments, are both ignored. Duplicates of names
 * that already appeared are also ignored. Both the configuration files and the
 * provider classes will be loaded using the thread context class loader.
 * </p>
 * <p>
 * This class is thread-safe.
 * </p>
 *
 * @see java.nio.charset.spi.CharsetProvider
 *
 */
public abstract class Charset implements Comparable<Charset> {

    /*
     * --------------------------------------------------------------------
     * Constants
     * --------------------------------------------------------------------
     */

    /*
     * the name of configuration files where charset provider class names can be
     * specified.
     */
    private static final String PROVIDER_CONFIGURATION_FILE_NAME = "META-INF/services/java.nio.charset.spi.CharsetProvider"; //$NON-NLS-1$

    /*
     * the encoding of configuration files
     */
    private static final String PROVIDER_CONFIGURATION_FILE_ENCODING = "UTF-8"; //$NON-NLS-1$

    /*
     * the comment string used in configuration files
     */
    private static final String PROVIDER_CONFIGURATION_FILE_COMMENT = "#"; //$NON-NLS-1$

    // Lazily resolved via getSystemClassLoader() below.
    private static ClassLoader systemClassLoader;

    /*
     * --------------------------------------------------------------------
     * Class variables
     * --------------------------------------------------------------------
     */

    // built in provider instance, assuming thread-safe
    private static CharsetProviderICU _builtInProvider = null;

    // cached built in charsets
    private static TreeMap<String, Charset> _builtInCharsets = null;

    /*
     * --------------------------------------------------------------------
     * Instance variables
     * --------------------------------------------------------------------
     */
    private final String canonicalName;

    // the aliases set
    private final HashSet<String> aliasesSet;

    // cached Charset table
    private static HashMap<String, Charset> cachedCharsetTable = new HashMap<String, Charset>();

    // cached CharsetDecoder table
    // NOTE(review): not referenced in this excerpt; presumably used by
    // decoder-caching methods further down the file — verify.
    private static HashMap<String, CharsetDecoder> cachedCharsetDecoderTable = new HashMap<String, CharsetDecoder>();

    // cached CharsetEncoder table
    // NOTE(review): not referenced in this excerpt; presumably used by
    // encoder-caching methods further down the file — verify.
    private static HashMap<String, CharsetEncoder> cachedCharsetEncoderTable = new HashMap<String, CharsetEncoder>();

    /*
     * -------------------------------------------------------------------
     * Global initialization
     * -------------------------------------------------------------------
     */
    static {
        /*
         * create built-in charset provider even if no privilege to access
         * charset provider.
         */
        _builtInProvider = AccessController
                .doPrivileged(new PrivilegedAction<CharsetProviderICU>() {
                    public CharsetProviderICU run() {
                        return new CharsetProviderICU();
                    }
                });
    }

    /*
     * -------------------------------------------------------------------
     * Constructors
     * -------------------------------------------------------------------
     */
    /**
     * Constructs a <code>Charset</code> object. Duplicated aliases are
     * ignored.
     *
     * @param canonicalName
     *            the canonical name of the charset
     * @param aliases
     *            an array containing all aliases of the charset
     * @throws IllegalCharsetNameException
     *             on an illegal value being supplied for either
     *             <code>canonicalName</code> or for any element of
     *             <code>aliases</code>.
     *
     */
    protected Charset(String canonicalName, String[] aliases)
            throws IllegalCharsetNameException {
        // throw IllegalArgumentException if name is null
        if (null == canonicalName) {
            throw new NullPointerException();
        }
        // check whether the given canonical name is legal
        checkCharsetName(canonicalName);
        this.canonicalName = canonicalName;
        // check each alias and put into a set
        this.aliasesSet = new HashSet<String>();
        if (null != aliases) {
            for (int i = 0; i < aliases.length; i++) {
                checkCharsetName(aliases[i]);
                this.aliasesSet.add(aliases[i]);
            }
        }
    }

    /*
     * -------------------------------------------------------------------
     * Methods
     * -------------------------------------------------------------------
     */
    /*
     * Checks whether a character is a special character that can be used in
     * charset names, other than letters and digits.
     */
    private static boolean isSpecial(char c) {
        return ('-' == c || '.' == c || ':' == c || '_' == c);
    }

    /*
     * Checks whether a character is a letter (ascii) which are defined in Java
     * Spec.
     */
    private static boolean isLetter(char c) {
        return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z');
    }

    /*
     * Checks whether a character is a digit (ascii) which are defined in Java
     * Spec.
     */
    private static boolean isDigit(char c) {
        return ('0' <= c && c <= '9');
    }

    /*
     * Checks whether a given string is a legal charset name. The argument name
     * should not be null.
     */
    private static void checkCharsetName(String name) {
        // An empty string is illegal charset name
        if (name.length() == 0) {
            throw new IllegalCharsetNameException(name);
        }
        // The first character must be a letter or a digit
        // This is related to HARMONY-68 (won't fix)
        // char first = name.charAt(0);
        // if (!isLetter(first) && !isDigit(first)) {
        // throw new IllegalCharsetNameException(name);
        // }
        // Check the remaining characters
        int length = name.length();
        for (int i = 0; i < length; i++) {
            char c = name.charAt(i);
            if (!isLetter(c) && !isDigit(c) && !isSpecial(c)) {
                throw new IllegalCharsetNameException(name);
            }
        }
    }

    /*
     * Use privileged code to get the context class loader.
     */
    private static ClassLoader getContextClassLoader() {
        final Thread t = Thread.currentThread();
        return AccessController
                .doPrivileged(new PrivilegedAction<ClassLoader>() {
                    public ClassLoader run() {
                        return t.getContextClassLoader();
                    }
                });
    }

    /*
     * Use privileged code to get the system class loader.
     */
    private static void getSystemClassLoader() {
        if (null == systemClassLoader) {
            systemClassLoader = AccessController
                    .doPrivileged(new PrivilegedAction<ClassLoader>() {
                        public ClassLoader run() {
                            return ClassLoader.getSystemClassLoader();
                        }
                    });
        }
    }

    /*
     * Add the charsets supported by the given provider to the map.
     */
    private static void addCharsets(CharsetProvider cp,
            TreeMap<String, Charset> charsets) {
        Iterator<Charset> it = cp.charsets();
        while (it.hasNext()) {
            Charset cs = it.next();
            // Only new charsets will be added
            if (!charsets.containsKey(cs.name())) {
                charsets.put(cs.name(), cs);
            }
        }
    }

    /*
     * Trim comment string, and then trim white spaces.
     * Returns the text before any '#' comment marker, trimmed.
     */
    private static String trimClassName(String name) {
        String trimedName = name;
        int index = name.indexOf(PROVIDER_CONFIGURATION_FILE_COMMENT);
        // Trim comments
        if (index != -1) {
            trimedName = name.substring(0, index);
        }
        return trimedName.trim();
    }

    /*
     * Read a configuration file and add the charsets supported by the providers
     * specified by this configuration file to the map.
     */
    private static void loadConfiguredCharsets(URL configFile,
            ClassLoader contextClassLoader, TreeMap<String, Charset> charsets) {
        BufferedReader reader = null;
        try {
            InputStream is = configFile.openStream();
            // Read each line for charset provider class names
            reader = new BufferedReader(new InputStreamReader(is,
                    PROVIDER_CONFIGURATION_FILE_ENCODING));
            String providerClassName = reader.readLine();
            while (null != providerClassName) {
                providerClassName = trimClassName(providerClassName);
                // Skip comments and blank lines
                if (providerClassName.length() > 0) { // Non empty string
                    // Load the charset provider
                    Object cp = null;
                    try {
                        Class<?> c = Class.forName(providerClassName, true,
                                contextClassLoader);
                        cp = c.newInstance();
                    } catch (Exception ex) {
                        // try to use system classloader when context
                        // classloader failed to load config file.
                        try {
                            getSystemClassLoader();
                            Class<?> c = Class.forName(providerClassName, true,
                                    systemClassLoader);
                            cp = c.newInstance();
                        } catch (Exception e) {
                            throw new Error(e.getMessage(), e);
                        }
                    }
                    // Put the charsets supported by this provider into the map
                    addCharsets((CharsetProvider) cp, charsets);
                }
                // Read the next line of the config file
                providerClassName = reader.readLine();
            }
        } catch (IOException ex) {
            // Can't read this configuration file, ignore
        } finally {
            try {
                if (null != reader) {
                    reader.close();
                }
            } catch (IOException ex) {
                // Ignore closing exception
            }
        }
    }

    /**
     * Gets a map of all available charsets supported by the runtime.
     * <p>
     * The returned map contains mappings from canonical names to corresponding
     * instances of <code>Charset</code>. The canonical names can be
     * considered as case-insensitive.
     * </p>
     *
     * @return an unmodifiable map of all available charsets supported by the
     *         runtime
     */
    @SuppressWarnings("unchecked")
    public static SortedMap<String, Charset> availableCharsets() {
        // Initialize the built-in charsets map cache if necessary
        // NOTE(review): double-checked lazy initialization on Charset.class.
        if (null == _builtInCharsets) {
            synchronized (Charset.class) {
                if (null == _builtInCharsets) {
                    _builtInCharsets = new TreeMap<String, Charset>(
                            IgnoreCaseComparator.getInstance());
                    _builtInProvider.putCharsets(_builtInCharsets);
                }
            }
        }

        // Add built-in charsets
        TreeMap<String, Charset> charsets = (TreeMap<String, Charset>) _builtInCharsets
                .clone();

        // Collect all charsets provided by charset providers
        ClassLoader contextClassLoader = getContextClassLoader();
        Enumeration<URL> e = null;
        try {
            if (null != contextClassLoader) {
                e = contextClassLoader
                        .getResources(PROVIDER_CONFIGURATION_FILE_NAME);
            } else {
                getSystemClassLoader();
                e = systemClassLoader
                        .getResources(PROVIDER_CONFIGURATION_FILE_NAME);
            }
            // Examine each configuration file
            while (e.hasMoreElements()) {
                loadConfiguredCharsets(e.nextElement(), contextClassLoader,
                        charsets);
            }
        } catch (IOException ex) {
            // Unexpected ClassLoader exception, ignore
        }
        return Collections.unmodifiableSortedMap(charsets);
    }

    /*
     * Read a configuration file and try to find the desired charset among those
     * which are supported by the providers specified in this configuration
     * file. Returns null when the charset is not found or the file cannot be
     * read.
     */
    private static Charset searchConfiguredCharsets(String charsetName,
            ClassLoader contextClassLoader, URL configFile) {
        BufferedReader reader = null;
        try {
            InputStream is = configFile.openStream();
            // Read each line for charset provider class names
            reader = new BufferedReader(new InputStreamReader(is,
                    PROVIDER_CONFIGURATION_FILE_ENCODING));
            String providerClassName = reader.readLine();
            while (null != providerClassName) {
                providerClassName = trimClassName(providerClassName);
                if (providerClassName.length() > 0) { // Non empty string
                    // Load the charset provider
                    Object cp = null;
                    try {
                        Class<?> c = Class.forName(providerClassName, true,
                                contextClassLoader);
                        cp = c.newInstance();
                    } catch (Exception ex) {
                        // try to use system classloader when context
                        // classloader failed to load config file.
                        try {
                            getSystemClassLoader();
                            Class<?> c = Class.forName(providerClassName, true,
                                    systemClassLoader);
                            cp = c.newInstance();
                        } catch (SecurityException e) {
                            throw e;
                        } catch (Exception e) {
                            throw new Error(e.getMessage(), e);
                        }
                    }
                    // Try to get the desired charset from this provider
                    Charset cs = ((CharsetProvider) cp)
                            .charsetForName(charsetName);
                    if (null != cs) {
                        return cs;
                    }
                }
                // Read the next line of the config file
                providerClassName = reader.readLine();
            }
            return null;
        } catch (IOException ex) {
            // Can't read this configuration file
            return null;
        } finally {
            try {
                if (null != reader) {
                    reader.close();
                }
            } catch (IOException ex) {
                // Ignore closing exception
            }
        }
    }

    /*
     * Gets a <code> Charset </code> instance for the specified charset name. If
     * the charset is not supported, returns null instead of throwing an
     * exception.
     * NOTE(review): successful lookups (and their aliases) are cached in
     * cachedCharsetTable under the Charset.class lock.
     */
    private static Charset forNameInternal(String charsetName)
            throws IllegalCharsetNameException {
        if (null == charsetName) {
            throw new IllegalArgumentException();
        }
        checkCharsetName(charsetName);
        synchronized (Charset.class) {
            // Try to get Charset from cachedCharsetTable
            Charset cs = getCachedCharset(charsetName);
            if (null != cs) {
                return cs;
            }
            // Try built-in charsets
            cs = _builtInProvider.charsetForName(charsetName);
            if (null != cs) {
                cacheCharset(cs);
                return cs;
            }

            // Collect all charsets provided by charset providers
            ClassLoader contextClassLoader = getContextClassLoader();
            Enumeration<URL> e = null;
            try {
                if (null != contextClassLoader) {
                    e = contextClassLoader
                            .getResources(PROVIDER_CONFIGURATION_FILE_NAME);
                } else {
                    getSystemClassLoader();
                    e = systemClassLoader
                            .getResources(PROVIDER_CONFIGURATION_FILE_NAME);
                }

                // Examine each configuration file
                while (e.hasMoreElements()) {
                    cs = searchConfiguredCharsets(charsetName,
                            contextClassLoader, e.nextElement());
                    if (null != cs) {
                        cacheCharset(cs);
                        return cs;
                    }
                }
            } catch (IOException ex) {
                // Unexpected ClassLoader exception, ignore
            }
        }
        return null;
    }

    /*
     * save charset into cachedCharsetTable
     */
    private static void cacheCharset(Charset cs) {
        cachedCharsetTable.put(cs.name(), cs);
        Set<String> aliasesSet = cs.aliases();
        if (null != aliasesSet) {
            Iterator<String> iter = aliasesSet.iterator();
            while (iter.hasNext()) {
                String alias = iter.next();
                cachedCharsetTable.put(alias, cs);
            }
        }
    }

    /*
     * get cached charset reference by name
     */
    private static Charset getCachedCharset(String name) {
        return cachedCharsetTable.get(name);
    }

    /**
     * Gets a <code>Charset</code> instance for the specified charset name.
     *
     * @param charsetName
     *            the name of the charset
     * @return a <code>Charset</code> instance for the specified charset name
     * @throws IllegalCharsetNameException
     *             If the specified charset name is illegal.
     * @throws UnsupportedCharsetException
     *             If the desired charset is not supported by this runtime.
*/ public static Charset forName(String charsetName) throws IllegalCharsetNameException, UnsupportedCharsetException { Charset c = forNameInternal(charsetName); if (null == c) { throw new UnsupportedCharsetException(charsetName); } return c; } /** * Determines whether the specified charset is supported by this runtime. * * @param charsetName * the name of the charset * @return true if the specified charset is supported, otherwise false * @throws IllegalCharsetNameException * If the specified charset name is illegal. */ public static boolean isSupported(String charsetName) throws IllegalCharsetNameException { Charset cs = forNameInternal(charsetName); return (null != cs); } /** * Determines whether this charset is a super set of the given charset. * * @param charset * a given charset * @return true if this charset is a super set of the given charset, * otherwise false */ public abstract boolean contains(Charset charset); /** * Gets a new instance of encoder for this charset. * * @return a new instance of encoder for this charset */ public abstract CharsetEncoder newEncoder(); /** * Gets a new instance of decoder for this charset. * * @return a new instance of decoder for this charset */ public abstract CharsetDecoder newDecoder(); /** * Gets the canonical name of this charset. * * @return this charset's name in canonical form. */ public final String name() { return this.canonicalName; } /** * Gets the set of this charset's aliases. * * @return an unmodifiable set of this charset's aliases */ public final Set<String> aliases() { return Collections.unmodifiableSet(this.aliasesSet); } /** * Gets the name of this charset for the default locale. * * @return the name of this charset for the default locale */ public String displayName() { return this.canonicalName; } /** * Gets the name of this charset for the specified locale. 
* * @param l * a certain locale * @return the name of this charset for the specified locale */ public String displayName(Locale l) { return this.canonicalName; } /** * Answers whether this charset is known to be registered in the IANA * Charset Registry. * * @return true if the charset is known to be registered, otherwise returns * false. */ public final boolean isRegistered() { return !canonicalName.startsWith("x-") //$NON-NLS-1$ && !canonicalName.startsWith("X-"); //$NON-NLS-1$ } /** * Answers true if this charset supports encoding, otherwise false. * * @return true */ public boolean canEncode() { return true; } /** * Encodes the content of the give character buffer and outputs to a byte * buffer that is to be returned. * <p> * The default action in case of encoding errors is * <code>CodingErrorAction.REPLACE</code>. * </p> * * @param buffer * the character buffer containing the content to be encoded * @return the result of the encoding */ synchronized public final ByteBuffer encode(CharBuffer buffer) { CharsetEncoder e = getCachedCharsetEncoder(canonicalName); try { synchronized (e) { return e.encode(buffer); } } catch (CharacterCodingException ex) { throw new Error(ex.getMessage(), ex); } } /* * get cached CharsetEncoder by canonical name */ private CharsetEncoder getCachedCharsetEncoder(String name) { synchronized (cachedCharsetEncoderTable) { CharsetEncoder e = cachedCharsetEncoderTable .get(name); if (null == e) { e = this.newEncoder(); e.onMalformedInput(CodingErrorAction.REPLACE); e.onUnmappableCharacter(CodingErrorAction.REPLACE); cachedCharsetEncoderTable.put(name, e); } return e; } } /** * Encodes a string and outputs to a byte buffer that is to be returned. * <p> * The default action in case of encoding errors is * <code>CodingErrorAction.REPLACE</code>. 
* </p> * * @param s * the string to be encoded * @return the result of the encoding */ public final ByteBuffer encode(String s) { return encode(CharBuffer.wrap(s)); } /** * Decodes the content of the give byte buffer and outputs to a character * buffer that is to be returned. * <p> * The default action in case of decoding errors is * <code>CodingErrorAction.REPLACE</code>. * </p> * * @param buffer * the byte buffer containing the content to be decoded * @return a character buffer containing the output of the decoding */ public final CharBuffer decode(ByteBuffer buffer) { CharsetDecoder d = getCachedCharsetDecoder(canonicalName); try { synchronized (d) { return d.decode(buffer); } } catch (CharacterCodingException ex) { throw new Error(ex.getMessage(), ex); } } /* * get cached CharsetDecoder by canonical name */ private CharsetDecoder getCachedCharsetDecoder(String name) { synchronized (cachedCharsetDecoderTable) { CharsetDecoder d = cachedCharsetDecoderTable .get(name); if (null == d) { d = this.newDecoder(); d.onMalformedInput(CodingErrorAction.REPLACE); d.onUnmappableCharacter(CodingErrorAction.REPLACE); cachedCharsetDecoderTable.put(name, d); } return d; } } /* * ------------------------------------------------------------------- * Methods implementing parent interface Comparable * ------------------------------------------------------------------- */ /** * Compares this charset with the given charset. * * @param charset * the given object to be compared with * @return a negative integer if less than the given object, a positive * integer if larger than it, or 0 if equal to it */ public final int compareTo(Charset charset) { return this.canonicalName.compareToIgnoreCase(charset.canonicalName); } /* * ------------------------------------------------------------------- * Methods overriding parent class Object * ------------------------------------------------------------------- */ /** * Determines whether this charset equals to the given object. 
They are * considered to be equal if they have the same canonical name. * * @param obj * the given object to be compared with * @return true if they have the same canonical name, otherwise false */ @Override public final boolean equals(Object obj) { if (obj instanceof Charset) { Charset that = (Charset) obj; return this.canonicalName.equals(that.canonicalName); } return false; } /** * Gets the hash code of this charset. * * @return the hash code of this charset */ @Override public final int hashCode() { return this.canonicalName.hashCode(); } /** * Gets a string representation of this charset. Usually this contains the * canonical name of the charset. * * @return a string representation of this charset */ @Override public final String toString() { return "Charset[" + this.canonicalName + "]"; //$NON-NLS-1$//$NON-NLS-2$ } /** * Gets the system default charset from jvm. * * @return the default charset */ public static Charset defaultCharset() { Charset defaultCharset = null; String encoding = AccessController .doPrivileged(new PrivilegedAction<String>() { public String run() { return System.getProperty("file.encoding"); //$NON-NLS-1$ } }); try { defaultCharset = Charset.forName(encoding); } catch (UnsupportedCharsetException e) { defaultCharset = Charset.forName("UTF-8"); //$NON-NLS-1$ } return defaultCharset; } /** * A comparator that ignores case. */ static class IgnoreCaseComparator implements Comparator<String> { // the singleton private static Comparator<String> c = new IgnoreCaseComparator(); /* * Default constructor. */ private IgnoreCaseComparator() { // no action } /* * Gets a single instance. */ public static Comparator<String> getInstance() { return c; } /* * Compares two strings ignoring case. */ public int compare(String s1, String s2) { return s1.compareToIgnoreCase(s2); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.compute.models;

import com.azure.core.annotation.Fluent;
import com.azure.resourcemanager.compute.fluent.models.VirtualMachineScaleSetExtensionInner;
import com.azure.resourcemanager.resources.fluentcore.arm.models.ChildResource;
import com.azure.resourcemanager.resources.fluentcore.model.Attachable;
import com.azure.resourcemanager.resources.fluentcore.model.HasInnerModel;
import com.azure.resourcemanager.resources.fluentcore.model.Settable;
import java.util.HashMap;
import java.util.Map;

/** An immutable client-side representation of an extension associated with virtual machines in a scale set. */
@Fluent
public interface VirtualMachineScaleSetExtension
    extends HasInnerModel<VirtualMachineScaleSetExtensionInner>, ChildResource<VirtualMachineScaleSet> {
    /** @return the publisher name of the virtual machine scale set extension image this extension is created from */
    String publisherName();

    /** @return the type name of the virtual machine scale set extension image this extension is created from */
    String typeName();

    /** @return the version name of the virtual machine scale set extension image this extension is created from */
    String versionName();

    /**
     * @return true if this extension is configured to upgrade automatically when a new minor version of the extension
     *     image that this extension is based on is published
     */
    boolean autoUpgradeMinorVersionEnabled();

    /** @return the public settings of the virtual machine scale set extension as key-value pairs */
    Map<String, Object> publicSettings();

    /** @return the public settings of the virtual machine extension as a JSON string */
    String publicSettingsAsJsonString();

    /** @return the provisioning state of this virtual machine scale set extension */
    String provisioningState();

    /**
     * The entirety of a virtual machine scale set extension definition as a part of a parent definition.
     *
     * @param <ParentT> the stage of the parent definition to return to after attaching this definition
     */
    interface Definition<ParentT>
        extends DefinitionStages.Blank<ParentT>,
            DefinitionStages.WithImageOrPublisher<ParentT>,
            DefinitionStages.WithPublisher<ParentT>,
            DefinitionStages.WithType<ParentT>,
            DefinitionStages.WithVersion<ParentT>,
            DefinitionStages.WithAttach<ParentT> {
    }

    /**
     * Grouping of virtual machine scale set extension definition stages as a part of parent virtual machine scale set
     * definition.
     */
    interface DefinitionStages {
        /**
         * The first stage of a virtual machine scale set extension definition.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface Blank<ParentT> extends WithImageOrPublisher<ParentT> {
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify an extension image or
         * specify name of the virtual machine scale set extension publisher.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithImageOrPublisher<ParentT> extends WithPublisher<ParentT> {
            /**
             * Specifies the virtual machine scale set extension image to use.
             *
             * @param image an extension image
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withImage(VirtualMachineExtensionImage image);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the publisher of the
         * extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithPublisher<ParentT> {
            /**
             * Specifies the name of the publisher of the virtual machine scale set extension image.
             *
             * @param extensionImagePublisherName a publisher name
             * @return the next stage of the definition
             */
            WithType<ParentT> withPublisher(String extensionImagePublisherName);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the type of the virtual
         * machine scale set extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithType<ParentT> {
            /**
             * Specifies the type of the virtual machine scale set extension image.
             *
             * @param extensionImageTypeName the image type name
             * @return the next stage of the definition
             */
            WithVersion<ParentT> withType(String extensionImageTypeName);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the version of the virtual
         * machine scale set extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithVersion<ParentT> {
            /**
             * Specifies the version of the virtual machine scale set extension image.
             *
             * @param extensionImageVersionName the version name
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withVersion(String extensionImageVersionName);
        }

        /**
         * The final stage of a virtual machine scale set extension definition.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithAttach<ParentT>
            extends Attachable.InDefinition<ParentT>,
                WithAutoUpgradeMinorVersion<ParentT>,
                WithSettings<ParentT> {
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to enable or disable auto upgrade of
         * the extension when a new minor version of virtual machine scale set extension image gets published.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithAutoUpgradeMinorVersion<ParentT> {
            /**
             * Enables auto upgrading of the extension with minor versions.
             *
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withMinorVersionAutoUpgrade();

            /**
             * Disables auto upgrading the extension with minor versions.
             *
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withoutMinorVersionAutoUpgrade();
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the public and private
         * settings.
         *
         * @param <ParentT> the stage of the parent definition to return to after attaching this definition
         */
        interface WithSettings<ParentT> {
            /**
             * Specifies a public settings entry.
             *
             * @param key the key of a public settings entry
             * @param value the value of the public settings entry
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withPublicSetting(String key, Object value);

            /**
             * Specifies a private settings entry.
             *
             * @param key the key of a private settings entry
             * @param value the value of the private settings entry
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withProtectedSetting(String key, Object value);

            /**
             * Specifies public settings.
             *
             * @param settings the public settings
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withPublicSettings(HashMap<String, Object> settings);

            /**
             * Specifies private settings.
             *
             * @param settings the private settings
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withProtectedSettings(HashMap<String, Object> settings);
        }
    }

    /**
     * Grouping of virtual machine scale set extension definition stages as part of parent virtual machine scale set
     * update.
     */
    interface UpdateDefinitionStages {
        /**
         * The first stage of a virtual machine scale set extension definition.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface Blank<ParentT> extends WithImageOrPublisher<ParentT> {
        }

        /**
         * The stage of a virtual machine scale set extension allowing to specify an extension image or the name of the
         * virtual machine extension publisher.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithImageOrPublisher<ParentT> extends WithPublisher<ParentT> {
            /**
             * Specifies the virtual machine scale set extension image to use.
             *
             * @param image an extension image
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withImage(VirtualMachineExtensionImage image);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the publisher of the
         * virtual machine scale set extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithPublisher<ParentT> {
            /**
             * Specifies the name of the virtual machine scale set extension image publisher.
             *
             * @param extensionImagePublisherName the publisher name
             * @return the next stage of the definition
             */
            WithType<ParentT> withPublisher(String extensionImagePublisherName);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the type of the virtual
         * machine scale set extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithType<ParentT> {
            /**
             * Specifies the type of the virtual machine scale set extension image.
             *
             * @param extensionImageTypeName an image type name
             * @return the next stage of the definition
             */
            WithVersion<ParentT> withType(String extensionImageTypeName);
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the version of the virtual
         * machine scale set extension image this extension is based on.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithVersion<ParentT> {
            /**
             * Specifies the version of the virtual machine scale set extension image.
             *
             * @param extensionImageVersionName a version name
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withVersion(String extensionImageVersionName);
        }

        /**
         * The final stage of the virtual machine scale set extension definition.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithAttach<ParentT>
            extends Attachable.InUpdate<ParentT>,
                WithAutoUpgradeMinorVersion<ParentT>,
                WithSettings<ParentT> {
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to enable or disable auto upgrade of
         * the extension when a new minor version of virtual machine scale set extension image gets published.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithAutoUpgradeMinorVersion<ParentT> {
            /**
             * Enables auto upgrading of the extension with minor versions.
             *
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withMinorVersionAutoUpgrade();

            /**
             * Disables auto upgrade of the extension with minor versions.
             *
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withoutMinorVersionAutoUpgrade();
        }

        /**
         * The stage of a virtual machine scale set extension definition allowing to specify the public and private
         * settings.
         *
         * @param <ParentT> the stage of the parent update to return to after attaching this definition
         */
        interface WithSettings<ParentT> {
            /**
             * Specifies a public settings entry.
             *
             * @param key the key of a public settings entry
             * @param value the value of the public settings entry
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withPublicSetting(String key, Object value);

            /**
             * Specifies a private settings entry.
             *
             * @param key the key of a private settings entry
             * @param value the value of the private settings entry
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withProtectedSetting(String key, Object value);

            /**
             * Specifies public settings.
             *
             * @param settings the public settings
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withPublicSettings(HashMap<String, Object> settings);

            /**
             * Specifies private settings.
             *
             * @param settings the private settings
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withProtectedSettings(HashMap<String, Object> settings);
        }
    }

    /**
     * The entirety of a virtual machine scale set extension definition as a part of parent update.
     *
     * @param <ParentT> the stage of the parent update to return to after attaching this definition
     */
    interface UpdateDefinition<ParentT>
        extends UpdateDefinitionStages.Blank<ParentT>,
            UpdateDefinitionStages.WithImageOrPublisher<ParentT>,
            UpdateDefinitionStages.WithPublisher<ParentT>,
            UpdateDefinitionStages.WithType<ParentT>,
            UpdateDefinitionStages.WithVersion<ParentT>,
            UpdateDefinitionStages.WithAttach<ParentT> {
    }

    /** Grouping of virtual machine extension update stages. */
    interface UpdateStages {
        /**
         * The stage of a virtual machine scale set extension update allowing to enable or disable auto upgrade of the
         * extension when a new minor version of virtual machine scale set extension image gets published.
         */
        interface WithAutoUpgradeMinorVersion {
            /**
             * Enables auto-upgrading of the extension with minor versions.
             *
             * @return the next stage of the update
             */
            Update withMinorVersionAutoUpgrade();

            /**
             * Disables auto upgrading of the extension with minor versions.
             *
             * @return the next stage of the update
             */
            Update withoutMinorVersionAutoUpgrade();
        }

        /**
         * The stage of a virtual machine scale set extension update allowing to add or update public and private
         * settings.
         */
        interface WithSettings {
            /**
             * Specifies a public settings entry.
             *
             * @param key the key of a public settings entry
             * @param value the value of the public settings entry
             * @return the next stage of the update
             */
            Update withPublicSetting(String key, Object value);

            /**
             * Specifies a private settings entry.
             *
             * @param key the key of a private settings entry
             * @param value the value of the private settings entry
             * @return the next stage of the update
             */
            Update withProtectedSetting(String key, Object value);

            /**
             * Specifies public settings.
             *
             * @param settings the public settings
             * @return the next stage of the update
             */
            Update withPublicSettings(HashMap<String, Object> settings);

            /**
             * Specifies private settings.
             *
             * @param settings the private settings
             * @return the next stage of the update
             */
            Update withProtectedSettings(HashMap<String, Object> settings);
        }
    }

    /**
     * The entirety of virtual machine scale set extension update as a part of parent virtual machine scale set update.
     */
    interface Update
        extends Settable<VirtualMachineScaleSet.Update>,
            UpdateStages.WithAutoUpgradeMinorVersion,
            UpdateStages.WithSettings {
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.dialogflow.v2.model;

/**
 * Represents the result of a conversational query or event processing.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Dialogflow API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleCloudDialogflowV2beta1QueryResult extends com.google.api.client.json.GenericJson {

  /**
   * The action name from the matched intent.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String action;

  /**
   * This field is set to:
   *
   * - `false` if the matched intent has required parameters and not all of the required
   * parameter values have been collected. - `true` if all required parameter values have been
   * collected, or if the matched intent doesn't contain any required parameters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean allRequiredParamsPresent;

  /**
   * The free-form diagnostic info. For example, this field could contain webhook call latency. The
   * string keys of the Struct's fields map can change without notice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.Object> diagnosticInfo;

  /**
   * The collection of rich messages to present to the user.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDialogflowV2beta1IntentMessage> fulfillmentMessages;

  static {
    // hack to force ProGuard to consider GoogleCloudDialogflowV2beta1IntentMessage used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDialogflowV2beta1IntentMessage.class);
  }

  /**
   * The text to be pronounced to the user or shown on the screen. Note: This is a legacy field,
   * `fulfillment_messages` should be preferred.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String fulfillmentText;

  /**
   * The intent that matched the conversational query. Some, not all fields are filled in this
   * message, including but not limited to: `name`, `display_name` and `webhook_state`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDialogflowV2beta1Intent intent;

  /**
   * The intent detection confidence. Values range from 0.0 (completely uncertain) to 1.0
   * (completely certain). If there are multiple `knowledge_answers` messages, this value is set to
   * the greatest `knowledgeAnswers.match_confidence` value in the list.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float intentDetectionConfidence;

  /**
   * The result from Knowledge Connector (if any), ordered by decreasing
   * `KnowledgeAnswers.match_confidence`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDialogflowV2beta1KnowledgeAnswers knowledgeAnswers;

  /**
   * The language that was triggered during intent detection. See [Language
   * Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) for a list of
   * the currently supported language codes.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String languageCode;

  /**
   * The collection of output contexts. If applicable, `output_contexts.parameters` contains entries
   * with name `.original` containing the original parameter values before the query.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDialogflowV2beta1Context> outputContexts;

  static {
    // hack to force ProGuard to consider GoogleCloudDialogflowV2beta1Context used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDialogflowV2beta1Context.class);
  }

  /**
   * The collection of extracted parameters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.Object> parameters;

  /**
   * The original conversational query text:
   *
   * - If natural language text was provided as input, `query_text` contains a copy of the input.
   * - If natural language speech audio was provided as input, `query_text` contains the speech
   * recognition result. If speech recognizer produced multiple alternatives, a particular one is
   * picked. - If automatic spell correction is enabled, `query_text` will contain the corrected
   * user input.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String queryText;

  /**
   * The sentiment analysis result, which depends on the `sentiment_analysis_request_config`
   * specified in the request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDialogflowV2beta1SentimentAnalysisResult sentimentAnalysisResult;

  /**
   * The Speech recognition confidence between 0.0 and 1.0. A higher number indicates an estimated
   * greater likelihood that the recognized words are correct. The default of 0.0 is a sentinel
   * value indicating that confidence was not set.
   *
   * This field is not guaranteed to be accurate or set. In particular this field isn't set for
   * StreamingDetectIntent since the streaming endpoint has separate confidence estimates per
   * portion of the audio in StreamingRecognitionResult.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float speechRecognitionConfidence;

  /**
   * If the query was fulfilled by a webhook call, this field is set to the value of the `payload`
   * field returned in the webhook response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.Object> webhookPayload;

  /**
   * If the query was fulfilled by a webhook call, this field is set to the value of the `source`
   * field returned in the webhook response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String webhookSource;

  /**
   * The action name from the matched intent.
   * @return value or {@code null} for none
   */
  public java.lang.String getAction() {
    return action;
  }

  /**
   * The action name from the matched intent.
   * @param action action or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setAction(java.lang.String action) {
    this.action = action;
    return this;
  }

  /**
   * This field is set to:
   *
   * - `false` if the matched intent has required parameters and not all of the required
   * parameter values have been collected. - `true` if all required parameter values have been
   * collected, or if the matched intent doesn't contain any required parameters.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getAllRequiredParamsPresent() {
    return allRequiredParamsPresent;
  }

  /**
   * This field is set to:
   *
   * - `false` if the matched intent has required parameters and not all of the required
   * parameter values have been collected. - `true` if all required parameter values have been
   * collected, or if the matched intent doesn't contain any required parameters.
   * @param allRequiredParamsPresent allRequiredParamsPresent or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setAllRequiredParamsPresent(java.lang.Boolean allRequiredParamsPresent) {
    this.allRequiredParamsPresent = allRequiredParamsPresent;
    return this;
  }

  /**
   * The free-form diagnostic info. For example, this field could contain webhook call latency. The
   * string keys of the Struct's fields map can change without notice.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.Object> getDiagnosticInfo() {
    return diagnosticInfo;
  }

  /**
   * The free-form diagnostic info. For example, this field could contain webhook call latency. The
   * string keys of the Struct's fields map can change without notice.
   * @param diagnosticInfo diagnosticInfo or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setDiagnosticInfo(java.util.Map<String, java.lang.Object> diagnosticInfo) {
    this.diagnosticInfo = diagnosticInfo;
    return this;
  }

  /**
   * The collection of rich messages to present to the user.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDialogflowV2beta1IntentMessage> getFulfillmentMessages() {
    return fulfillmentMessages;
  }

  /**
   * The collection of rich messages to present to the user.
   * @param fulfillmentMessages fulfillmentMessages or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setFulfillmentMessages(java.util.List<GoogleCloudDialogflowV2beta1IntentMessage> fulfillmentMessages) {
    this.fulfillmentMessages = fulfillmentMessages;
    return this;
  }

  /**
   * The text to be pronounced to the user or shown on the screen. Note: This is a legacy field,
   * `fulfillment_messages` should be preferred.
   * @return value or {@code null} for none
   */
  public java.lang.String getFulfillmentText() {
    return fulfillmentText;
  }

  /**
   * The text to be pronounced to the user or shown on the screen. Note: This is a legacy field,
   * `fulfillment_messages` should be preferred.
   * @param fulfillmentText fulfillmentText or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setFulfillmentText(java.lang.String fulfillmentText) {
    this.fulfillmentText = fulfillmentText;
    return this;
  }

  /**
   * The intent that matched the conversational query. Some, not all fields are filled in this
   * message, including but not limited to: `name`, `display_name` and `webhook_state`.
   * @return value or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1Intent getIntent() {
    return intent;
  }

  /**
   * The intent that matched the conversational query. Some, not all fields are filled in this
   * message, including but not limited to: `name`, `display_name` and `webhook_state`.
   * @param intent intent or {@code null} for none
   */
  public GoogleCloudDialogflowV2beta1QueryResult setIntent(GoogleCloudDialogflowV2beta1Intent intent) {
    this.intent = intent;
    return this;
  }

  /**
   * The intent detection confidence. Values range from 0.0 (completely uncertain) to 1.0
   * (completely certain). If there are multiple `knowledge_answers` messages, this value is set to
   * the greatest `knowledgeAnswers.match_confidence` value in the list.
* @return value or {@code null} for none */ public java.lang.Float getIntentDetectionConfidence() { return intentDetectionConfidence; } /** * The intent detection confidence. Values range from 0.0 (completely uncertain) to 1.0 * (completely certain). If there are `multiple knowledge_answers` messages, this value is set to * the greatest `knowledgeAnswers.match_confidence` value in the list. * @param intentDetectionConfidence intentDetectionConfidence or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setIntentDetectionConfidence(java.lang.Float intentDetectionConfidence) { this.intentDetectionConfidence = intentDetectionConfidence; return this; } /** * The result from Knowledge Connector (if any), ordered by decreasing * `KnowledgeAnswers.match_confidence`. * @return value or {@code null} for none */ public GoogleCloudDialogflowV2beta1KnowledgeAnswers getKnowledgeAnswers() { return knowledgeAnswers; } /** * The result from Knowledge Connector (if any), ordered by decreasing * `KnowledgeAnswers.match_confidence`. * @param knowledgeAnswers knowledgeAnswers or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setKnowledgeAnswers(GoogleCloudDialogflowV2beta1KnowledgeAnswers knowledgeAnswers) { this.knowledgeAnswers = knowledgeAnswers; return this; } /** * The language that was triggered during intent detection. See [Language * Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) for a list of * the currently supported language codes. * @return value or {@code null} for none */ public java.lang.String getLanguageCode() { return languageCode; } /** * The language that was triggered during intent detection. See [Language * Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) for a list of * the currently supported language codes. 
* @param languageCode languageCode or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setLanguageCode(java.lang.String languageCode) { this.languageCode = languageCode; return this; } /** * The collection of output contexts. If applicable, `output_contexts.parameters` contains entries * with name `.original` containing the original parameter values before the query. * @return value or {@code null} for none */ public java.util.List<GoogleCloudDialogflowV2beta1Context> getOutputContexts() { return outputContexts; } /** * The collection of output contexts. If applicable, `output_contexts.parameters` contains entries * with name `.original` containing the original parameter values before the query. * @param outputContexts outputContexts or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setOutputContexts(java.util.List<GoogleCloudDialogflowV2beta1Context> outputContexts) { this.outputContexts = outputContexts; return this; } /** * The collection of extracted parameters. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.Object> getParameters() { return parameters; } /** * The collection of extracted parameters. * @param parameters parameters or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setParameters(java.util.Map<String, java.lang.Object> parameters) { this.parameters = parameters; return this; } /** * The original conversational query text: * * - If natural language text was provided as input, `query_text` contains a copy of the input. * - If natural language speech audio was provided as input, `query_text` contains the speech * recognition result. If speech recognizer produced multiple alternatives, a particular one is * picked. - If automatic spell correction is enabled, `query_text` will contain the corrected * user input. 
* @return value or {@code null} for none */ public java.lang.String getQueryText() { return queryText; } /** * The original conversational query text: * * - If natural language text was provided as input, `query_text` contains a copy of the input. * - If natural language speech audio was provided as input, `query_text` contains the speech * recognition result. If speech recognizer produced multiple alternatives, a particular one is * picked. - If automatic spell correction is enabled, `query_text` will contain the corrected * user input. * @param queryText queryText or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setQueryText(java.lang.String queryText) { this.queryText = queryText; return this; } /** * The sentiment analysis result, which depends on the `sentiment_analysis_request_config` * specified in the request. * @return value or {@code null} for none */ public GoogleCloudDialogflowV2beta1SentimentAnalysisResult getSentimentAnalysisResult() { return sentimentAnalysisResult; } /** * The sentiment analysis result, which depends on the `sentiment_analysis_request_config` * specified in the request. * @param sentimentAnalysisResult sentimentAnalysisResult or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setSentimentAnalysisResult(GoogleCloudDialogflowV2beta1SentimentAnalysisResult sentimentAnalysisResult) { this.sentimentAnalysisResult = sentimentAnalysisResult; return this; } /** * The Speech recognition confidence between 0.0 and 1.0. A higher number indicates an estimated * greater likelihood that the recognized words are correct. The default of 0.0 is a sentinel * value indicating that confidence was not set. * * This field is not guaranteed to be accurate or set. In particular this field isn't set for * StreamingDetectIntent since the streaming endpoint has separate confidence estimates per * portion of the audio in StreamingRecognitionResult. 
* @return value or {@code null} for none */ public java.lang.Float getSpeechRecognitionConfidence() { return speechRecognitionConfidence; } /** * The Speech recognition confidence between 0.0 and 1.0. A higher number indicates an estimated * greater likelihood that the recognized words are correct. The default of 0.0 is a sentinel * value indicating that confidence was not set. * * This field is not guaranteed to be accurate or set. In particular this field isn't set for * StreamingDetectIntent since the streaming endpoint has separate confidence estimates per * portion of the audio in StreamingRecognitionResult. * @param speechRecognitionConfidence speechRecognitionConfidence or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setSpeechRecognitionConfidence(java.lang.Float speechRecognitionConfidence) { this.speechRecognitionConfidence = speechRecognitionConfidence; return this; } /** * If the query was fulfilled by a webhook call, this field is set to the value of the `payload` * field returned in the webhook response. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.Object> getWebhookPayload() { return webhookPayload; } /** * If the query was fulfilled by a webhook call, this field is set to the value of the `payload` * field returned in the webhook response. * @param webhookPayload webhookPayload or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setWebhookPayload(java.util.Map<String, java.lang.Object> webhookPayload) { this.webhookPayload = webhookPayload; return this; } /** * If the query was fulfilled by a webhook call, this field is set to the value of the `source` * field returned in the webhook response. * @return value or {@code null} for none */ public java.lang.String getWebhookSource() { return webhookSource; } /** * If the query was fulfilled by a webhook call, this field is set to the value of the `source` * field returned in the webhook response. 
* @param webhookSource webhookSource or {@code null} for none */ public GoogleCloudDialogflowV2beta1QueryResult setWebhookSource(java.lang.String webhookSource) { this.webhookSource = webhookSource; return this; } @Override public GoogleCloudDialogflowV2beta1QueryResult set(String fieldName, Object value) { return (GoogleCloudDialogflowV2beta1QueryResult) super.set(fieldName, value); } @Override public GoogleCloudDialogflowV2beta1QueryResult clone() { return (GoogleCloudDialogflowV2beta1QueryResult) super.clone(); } }
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ruter.simulator;

import java.awt.Color;
import java.awt.Component;
import java.awt.Graphics;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import ruter.map.Map;
import ruter.network.Network;
import ruter.network.Node;
import ruter.network.UnavailableInformationException;
import ruter.visual.MapDrawer;
import ruter.visual.Painter;

/**
 * Simulation thread for the vehicle/network model.
 * <p>
 * Configure what to simulate up front, then call {@link #start()} once you are
 * ready to begin. Afterwards you may only pause, resume or stop the
 * simulation — no restart is allowed. The simulation speed can be modified
 * while running.
 *
 * @author Camilo Ortegon
 */
public class Simulation extends Thread {

    /** Recommended simulation step lengths, in milliseconds per step. */
    public static long ACCURACY_HIGH = 50;
    public static long ACCURACY_MEDIUM = 100;
    public static long ACCURACY_LOW = 200;

    private long simulationTime;     // step length, in milliseconds
    private int lastSleepTime;       // ms slept on the last step (diagnostic)
    private long paintingTime;       // minimum ms between repaints
    private double frameTime;        // step length, in seconds
    private double simulationSpeed;  // sleep multiplier: 1 = real time, <1 = faster, -1 = unbounded
    private long lastPaint;
    private long realTime;           // accumulated simulated time, in milliseconds
    private int windowWidth, windowHeight;
    // volatile: both flags are written by callers on other threads and read by run()
    private volatile boolean running, pause;
    private Map map;
    private MapDrawer mapDrawer;
    private ArrayList<Vehicle> vehicles;
    private ArrayList<Color> vehicleColors;
    private LinkedList<Updatable> updateElements;
    private Network network;
    private Component canvas;
    // Colors
    private Color borderColor = Color.red;
    private Color networkColor = Color.blue;

    /**
     * Creates the simulation thread.
     * Speed of the simulation can be modified on the way.
     * {@link #start()} must be called when you're ready to begin; after that you
     * can only pause, resume or stop. No restart is allowed.
     *
     * @param canvas the component to paint the simulation on; may be null.
     *        The component should call {@link #repaint(Graphics, double)} from
     *        its paint code to make changes visible.
     * @param map of the simulation
     * @param network the network to simulate the model in, or null otherwise.
     * @param simulationTime given in milliseconds; it represents the accuracy of
     *        the simulation. It is recommended to use one of ACCURACY_HIGH,
     *        ACCURACY_MEDIUM or ACCURACY_LOW. Using another value is up to you,
     *        but it may be inefficient or inaccurate.
     */
    public Simulation(Component canvas, Map map, Network network, int windowWidth, int windowHeight, long simulationTime) {
        this.simulationSpeed = 1;
        this.simulationTime = simulationTime;
        this.paintingTime = simulationTime;
        this.frameTime = simulationTime / 1000d;
        this.canvas = canvas;
        this.map = map;
        this.windowWidth = windowWidth;
        this.windowHeight = windowHeight;
        this.mapDrawer = new MapDrawer(map);
        this.network = network;
        this.vehicles = new ArrayList<Vehicle>();
        this.vehicleColors = new ArrayList<Color>();
        resumeSimulation();
    }

    public void setBorderColor(Color color) {
        this.borderColor = color;
    }

    public void setPaintingSpeed(long paintingTime) {
        this.paintingTime = paintingTime;
    }

    public Map getMap() {
        return map;
    }

    public Network getNetwork() {
        return network;
    }

    public void addVehicle(Vehicle vehicle, Color color) {
        // NOTE(review): not synchronized against the running loop — assumed to be
        // called before start() or while paused; confirm with callers.
        vehicles.add(vehicle);
        vehicleColors.add(color);
    }

    public ArrayList<Vehicle> getVehicles() {
        return vehicles;
    }

    public void addUpdatableElementToThread(Updatable element) {
        if (updateElements == null) {
            updateElements = new LinkedList<Updatable>();
        }
        updateElements.add(element);
    }

    public double getSimulationSpeed() {
        return simulationSpeed;
    }

    /** Halves the sleep multiplier: a smaller multiplier means less sleeping per step, i.e. a faster simulation. */
    public void increaseSimulationSpeed() {
        this.simulationSpeed /= 2;
    }

    /** Doubles the sleep multiplier, slowing the simulation down. */
    public void decreaseSimulationSpeed() {
        this.simulationSpeed *= 2;
    }

    public void setRealTimeSimulation() {
        this.simulationSpeed = 1;
    }

    /** A non-positive multiplier disables sleeping entirely (run flat out). */
    public void setMaximumSimulationSpeed() {
        this.simulationSpeed = -1;
    }

    @Override
    public void run() {
        running = true;
        while (running) {
            // Guarded wait inside the monitor: a `while` loop protects against
            // spurious wakeups, and holding the lock while checking `pause`
            // closes the lost-notify race the original plain `if` had.
            synchronized (this) {
                while (pause && running) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // restore interrupt status
                        return;
                    }
                }
            }
            if (!running) {
                break;
            }
            realTime += simulationTime;
            long runningTime = System.currentTimeMillis();
            for (int i = 0; i < vehicles.size(); i++) {
                vehicles.get(i).drive(frameTime);
            }
            if (network != null) {
                network.updateConnections(realTime);
            }
            if (updateElements != null) {
                Iterator<Updatable> it = updateElements.iterator();
                while (it.hasNext()) {
                    it.next().update(this);
                }
            }
            if (canvas != null) {
                repaint();
            }
            if (simulationSpeed > 0) {
                // Sleep off whatever part of the step budget the work did not use.
                int sleepTime = (int) (simulationTime * simulationSpeed);
                runningTime = System.currentTimeMillis() - runningTime;
                if (runningTime < sleepTime) {
                    try {
                        Thread.sleep(sleepTime - runningTime);
                    } catch (InterruptedException ex) {
                        Thread.currentThread().interrupt(); // restore interrupt status
                        return;
                    }
                }
                lastSleepTime = (int) (sleepTime - runningTime);
            }
            // simulationSpeed <= 0: maximum speed, no sleep at all.
        }
    }

    public void pauseSimulation() {
        pause = true;
    }

    public void resumeSimulation() {
        lastPaint = System.currentTimeMillis();
        synchronized (this) {
            pause = false;
            notifyAll();
        }
    }

    public void stopSimulation() {
        synchronized (this) {
            running = false;
            notifyAll(); // wake the thread if it is parked in a paused wait
        }
    }

    /** Requests a canvas repaint, throttled to roughly one per paintingTime ms. */
    private void repaint() {
        long timeSinceLastPainting = System.currentTimeMillis() - lastPaint;
        if (timeSinceLastPainting * 1.05 > paintingTime) {
            canvas.repaint();
            lastPaint = System.currentTimeMillis();
        }
    }

    /**
     * This method must be called from the paintComponent(Graphics) method of the
     * component you passed in the constructor.
     *
     * @param graphics the graphics context to draw on
     * @param ppm pixels per meter; determines the scale of the painting.
     */
    public void repaint(Graphics graphics, double ppm) {
        // Square surrounding
        graphics.setColor(borderColor);
        graphics.drawRect(0, 0, (int) (map.getXBounds() * ppm), (int) (map.getYBounds() * ppm));
        graphics.drawRect(1, 1, (int) (map.getXBounds() * ppm) - 2, (int) (map.getYBounds() * ppm) - 2);
        // Painting the map
        mapDrawer.repaint(graphics, windowWidth, windowHeight, 0, 0, ppm);
        // Painting the connections
        if (network != null) {
            graphics.setColor(networkColor);
            for (int i = 0; i < network.getNodes().size(); i++) {
                Node node = network.getNodes().get(i);
                Iterator<Node> it = network.getConnections(i);
                while (it.hasNext()) {
                    try {
                        Node neightbour = it.next();
                        // Only draw each undirected edge once (from the lower id).
                        if (neightbour.getId() > node.getId()) {
                            graphics.drawLine((int) (node.getX() * ppm), (int) (node.getY() * ppm),
                                    (int) (neightbour.getX() * ppm), (int) (neightbour.getY() * ppm));
                        }
                    } catch (UnavailableInformationException ex) {
                        // Connection information not available for this edge; skip it.
                    }
                }
            }
        }
        // Painting the robots
        for (int i = 0; i < vehicles.size(); i++) {
            graphics.setColor(vehicleColors.get(i));
            Painter.paintVehicle(graphics, vehicles.get(i), ppm);
        }
    }

    /**
     * Simulation class also controls the time since the simulation started, but
     * on the simulation scale, because it can go faster or slower than real life.
     *
     * @return real-time simulation in milliseconds.
     */
    public long getRealTime() {
        return realTime;
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.07.07 at 05:42:14 PM CEST
//
package generated_e57;

import java.math.BigDecimal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;

/**
 * JAXB binding for the {@code <e57scan>} root element: the metadata of a
 * single E57 laser scan (identification, sensor description, environmental
 * conditions, acquisition window, pose, bounds and point-format description).
 * <p>
 * NOTE(review): generated code — do not hand-edit; changes will be lost when
 * the source schema is recompiled. Field naming oddities ({@code sphericalbounds}
 * without an underscore, {@code relativeHumidity} as {@code byte},
 * {@code atmosphericPressure} as {@code int}) mirror the schema — presumably
 * intentional, but verify against the XSD before relying on them.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "guid",
    "name",
    "originalGuids",
    "description",
    "sensorVendor",
    "sensorModel",
    "sensorSerialNumber",
    "sensorHardwareVersion",
    "sensorSoftwareVersion",
    "sensorFirmwareVersion",
    "temperature",
    "relativeHumidity",
    "atmosphericPressure",
    "acquisitionStart",
    "acquisitionEnd",
    "pose",
    "indexBounds",
    "cartesianBounds",
    "sphericalbounds",
    "intensityLimits",
    "colorLimits",
    "pointSize",
    "pointFields"
})
@XmlRootElement(name = "e57scan")
public class E57Scan {

    // --- Identification -------------------------------------------------
    @XmlElement(required = true)
    protected String guid;
    @XmlElement(required = true)
    protected String name;
    @XmlElement(name = "original_guids", required = true)
    protected String originalGuids;
    @XmlElement(required = true)
    protected String description;

    // --- Sensor description ---------------------------------------------
    @XmlElement(name = "sensor_vendor", required = true)
    protected String sensorVendor;
    @XmlElement(name = "sensor_model", required = true)
    protected String sensorModel;
    @XmlElement(name = "sensor_serial_number", required = true)
    protected String sensorSerialNumber;
    @XmlElement(name = "sensor_hardware_version", required = true)
    protected String sensorHardwareVersion;
    @XmlElement(name = "sensor_software_version", required = true)
    protected String sensorSoftwareVersion;
    @XmlElement(name = "sensor_firmware_version", required = true)
    protected String sensorFirmwareVersion;

    // --- Environmental conditions at acquisition time -------------------
    @XmlElement(required = true)
    protected BigDecimal temperature;
    @XmlElement(name = "relative_humidity")
    protected byte relativeHumidity;
    @XmlElement(name = "atmospheric_pressure")
    protected int atmosphericPressure;

    // --- Acquisition window ---------------------------------------------
    @XmlElement(name = "acquisition_start", required = true)
    protected XMLGregorianCalendar acquisitionStart;
    @XmlElement(name = "acquisition_end", required = true)
    protected XMLGregorianCalendar acquisitionEnd;

    // --- Geometry, bounds and point format -------------------------------
    @XmlElement(required = true)
    protected Pose pose;
    @XmlElement(name = "index_bounds", required = true)
    protected IndexBounds indexBounds;
    @XmlElement(name = "cartesian_bounds", required = true)
    protected CartesianBounds cartesianBounds;
    @XmlElement(required = true)
    protected Sphericalbounds sphericalbounds;
    @XmlElement(name = "intensity_limits", required = true)
    protected IntensityLimits intensityLimits;
    @XmlElement(name = "color_limits", required = true)
    protected ColorLimits colorLimits;
    protected int pointSize;
    @XmlElement(name = "point_fields", required = true)
    protected PointFields pointFields;

    // --- Generated accessors: plain get/set pairs, no validation ---------

    /** @return the scan guid (possibly null) */
    public String getGuid() { return guid; }

    /** @param value the scan guid */
    public void setGuid(String value) { this.guid = value; }

    /** @return the scan name (possibly null) */
    public String getName() { return name; }

    /** @param value the scan name */
    public void setName(String value) { this.name = value; }

    /** @return the original guids (possibly null) */
    public String getOriginalGuids() { return originalGuids; }

    /** @param value the original guids */
    public void setOriginalGuids(String value) { this.originalGuids = value; }

    /** @return the free-text description (possibly null) */
    public String getDescription() { return description; }

    /** @param value the free-text description */
    public void setDescription(String value) { this.description = value; }

    /** @return the sensor vendor (possibly null) */
    public String getSensorVendor() { return sensorVendor; }

    /** @param value the sensor vendor */
    public void setSensorVendor(String value) { this.sensorVendor = value; }

    /** @return the sensor model (possibly null) */
    public String getSensorModel() { return sensorModel; }

    /** @param value the sensor model */
    public void setSensorModel(String value) { this.sensorModel = value; }

    /** @return the sensor serial number (possibly null) */
    public String getSensorSerialNumber() { return sensorSerialNumber; }

    /** @param value the sensor serial number */
    public void setSensorSerialNumber(String value) { this.sensorSerialNumber = value; }

    /** @return the sensor hardware version (possibly null) */
    public String getSensorHardwareVersion() { return sensorHardwareVersion; }

    /** @param value the sensor hardware version */
    public void setSensorHardwareVersion(String value) { this.sensorHardwareVersion = value; }

    /** @return the sensor software version (possibly null) */
    public String getSensorSoftwareVersion() { return sensorSoftwareVersion; }

    /** @param value the sensor software version */
    public void setSensorSoftwareVersion(String value) { this.sensorSoftwareVersion = value; }

    /** @return the sensor firmware version (possibly null) */
    public String getSensorFirmwareVersion() { return sensorFirmwareVersion; }

    /** @param value the sensor firmware version */
    public void setSensorFirmwareVersion(String value) { this.sensorFirmwareVersion = value; }

    /** @return the temperature (possibly null); units defined by the schema — TODO confirm */
    public BigDecimal getTemperature() { return temperature; }

    /** @param value the temperature */
    public void setTemperature(BigDecimal value) { this.temperature = value; }

    /** @return the relative humidity (primitive: 0 when unset) */
    public byte getRelativeHumidity() { return relativeHumidity; }

    /** @param value the relative humidity */
    public void setRelativeHumidity(byte value) { this.relativeHumidity = value; }

    /** @return the atmospheric pressure (primitive: 0 when unset) */
    public int getAtmosphericPressure() { return atmosphericPressure; }

    /** @param value the atmospheric pressure */
    public void setAtmosphericPressure(int value) { this.atmosphericPressure = value; }

    /** @return the acquisition start timestamp (possibly null) */
    public XMLGregorianCalendar getAcquisitionStart() { return acquisitionStart; }

    /** @param value the acquisition start timestamp */
    public void setAcquisitionStart(XMLGregorianCalendar value) { this.acquisitionStart = value; }

    /** @return the acquisition end timestamp (possibly null) */
    public XMLGregorianCalendar getAcquisitionEnd() { return acquisitionEnd; }

    /** @param value the acquisition end timestamp */
    public void setAcquisitionEnd(XMLGregorianCalendar value) { this.acquisitionEnd = value; }

    /** @return the scanner pose (possibly null) */
    public Pose getPose() { return pose; }

    /** @param value the scanner pose */
    public void setPose(Pose value) { this.pose = value; }

    /** @return the index bounds (possibly null) */
    public IndexBounds getIndexBounds() { return indexBounds; }

    /** @param value the index bounds */
    public void setIndexBounds(IndexBounds value) { this.indexBounds = value; }

    /** @return the Cartesian bounds (possibly null) */
    public CartesianBounds getCartesianBounds() { return cartesianBounds; }

    /** @param value the Cartesian bounds */
    public void setCartesianBounds(CartesianBounds value) { this.cartesianBounds = value; }

    /** @return the spherical bounds (possibly null) */
    public Sphericalbounds getSphericalbounds() { return sphericalbounds; }

    /** @param value the spherical bounds */
    public void setSphericalbounds(Sphericalbounds value) { this.sphericalbounds = value; }

    /** @return the intensity limits (possibly null) */
    public IntensityLimits getIntensityLimits() { return intensityLimits; }

    /** @param value the intensity limits */
    public void setIntensityLimits(IntensityLimits value) { this.intensityLimits = value; }

    /** @return the color limits (possibly null) */
    public ColorLimits getColorLimits() { return colorLimits; }

    /** @param value the color limits */
    public void setColorLimits(ColorLimits value) { this.colorLimits = value; }

    /** @return the point size (primitive: 0 when unset) */
    public int getPointSize() { return pointSize; }

    /** @param value the point size */
    public void setPointSize(int value) { this.pointSize = value; }

    /** @return the point-field description (possibly null) */
    public PointFields getPointFields() { return pointFields; }

    /** @param value the point-field description */
    public void setPointFields(PointFields value) { this.pointFields = value; }
}
//========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================

package io.protostuff.me;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Vector;

/**
 * Protobuf ser/deser util for messages/objects.
 * <p>
 * Static utility only; all merge/write entry points delegate to {@link IOUtil}
 * or drive a {@link ProtobufOutput} directly. The {@code LinkedBuffer}-taking
 * overloads require a freshly reset buffer ({@code start == offset}) and throw
 * {@link IllegalArgumentException} otherwise.
 *
 * @author David Yu
 * @created Oct 5, 2010
 */
public final class ProtobufIOUtil
{

    // Utility class: not instantiable.
    private ProtobufIOUtil()
    {
    }

    /**
     * Creates a protobuf pipe from a byte array.
     */
    public static Pipe newPipe(byte[] data)
    {
        return newPipe(data, 0, data.length);
    }

    /**
     * Creates a protobuf pipe from a byte array.
     */
    public static Pipe newPipe(byte[] data, int offset, int len)
    {
        final ByteArrayInput byteArrayInput = new ByteArrayInput(data, offset, len, false);
        return new Pipe()
        {
            protected Input begin(Pipe.Schema pipeSchema) throws IOException
            {
                return byteArrayInput;
            }

            protected void end(Pipe.Schema pipeSchema, Input input, boolean cleanupOnly) throws IOException
            {
                // nothing to release for a byte-array-backed input
                if (cleanupOnly)
                    return;

                // assert input == byteArrayInput;
            }
        };
    }

    /**
     * Creates a protobuf pipe from an {@link InputStream}.
     */
    public static Pipe newPipe(final InputStream in)
    {
        final CodedInput codedInput = new CodedInput(in, false);
        return new Pipe()
        {
            protected Input begin(Pipe.Schema pipeSchema) throws IOException
            {
                return codedInput;
            }

            protected void end(Pipe.Schema pipeSchema, Input input, boolean cleanupOnly) throws IOException
            {
                // the underlying stream is owned by the caller; nothing closed here
                if (cleanupOnly)
                    return;

                // assert input == codedInput;
            }
        };
    }

    /**
     * Merges the {@code message} with the byte array using the given {@code schema}.
     */
    public static void mergeFrom(byte[] data, Object message, Schema schema)
    {
        IOUtil.mergeFrom(data, 0, data.length, message, schema, false);
    }

    /**
     * Merges the {@code message} with the byte array using the given {@code schema}.
     */
    public static void mergeFrom(byte[] data, int offset, int length, Object message, Schema schema)
    {
        IOUtil.mergeFrom(data, offset, length, message, schema, false);
    }

    /**
     * Merges the {@code message} from the {@link InputStream} using the given {@code schema}.
     */
    public static void mergeFrom(InputStream in, Object message, Schema schema) throws IOException
    {
        IOUtil.mergeFrom(in, message, schema, false);
    }

    /**
     * Merges the {@code message} from the {@link InputStream} using the given {@code schema}.
     * <p>
     * The {@code buffer}'s internal byte array will be used for reading the message.
     */
    public static void mergeFrom(InputStream in, Object message, Schema schema, LinkedBuffer buffer)
            throws IOException
    {
        IOUtil.mergeFrom(in, buffer.buffer, message, schema, false);
    }

    /**
     * Merges the {@code message} (delimited) from the {@link InputStream} using the given {@code schema}.
     *
     * @return the size of the message
     */
    public static int mergeDelimitedFrom(InputStream in, Object message, Schema schema)
            throws IOException
    {
        return IOUtil.mergeDelimitedFrom(in, message, schema, false);
    }

    /**
     * Merges the {@code message} (delimited) from the {@link InputStream} using the given {@code schema}.
     * <p>
     * The delimited message size must not be larger than the {@code buffer}'s size/capacity. {@link ProtobufException}
     * "size limit exceeded" is thrown otherwise.
     *
     * @return the size of the message
     */
    public static int mergeDelimitedFrom(InputStream in, Object message, Schema schema, LinkedBuffer buffer)
            throws IOException
    {
        return IOUtil.mergeDelimitedFrom(in, buffer.buffer, message, schema, false);
    }

    /**
     * Used by the code generated messages that implement {@link java.io.Externalizable}. Merges from the
     * {@link DataInput}.
     *
     * @return the size of the message
     */
    public static int mergeDelimitedFrom(DataInput in, Object message, Schema schema) throws IOException
    {
        return IOUtil.mergeDelimitedFrom(in, message, schema, false);
    }

    /**
     * Serializes the {@code message} into a byte array using the given schema.
     *
     * @return the byte array containing the data.
     */
    public static byte[] toByteArray(Object message, Schema schema, LinkedBuffer buffer)
    {
        // a non-reset buffer would corrupt the output; fail fast
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);
        try
        {
            schema.writeTo(output, message);
        }
        catch (IOException e)
        {
            // in-memory serialization cannot actually raise IOException
            throw new RuntimeException("Serializing to a byte array threw an IOException " +
                    "(should never happen).");
        }
        return output.toByteArray();
    }

    /**
     * Writes the {@code message} into the {@link LinkedBuffer} using the given schema.
     *
     * @return the size of the message
     */
    public static int writeTo(LinkedBuffer buffer, Object message, Schema schema)
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);
        try
        {
            schema.writeTo(output, message);
        }
        catch (IOException e)
        {
            // in-memory serialization cannot actually raise IOException
            throw new RuntimeException("Serializing to a LinkedBuffer threw an IOException " +
                    "(should never happen).");
        }
        return output.getSize();
    }

    /**
     * Serializes the {@code message} into an {@link OutputStream} using the given schema.
     *
     * @return the size of the message
     */
    public static int writeTo(OutputStream out, Object message, Schema schema, LinkedBuffer buffer)
            throws IOException
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);
        schema.writeTo(output, message);
        return LinkedBuffer.writeTo(out, buffer);
    }

    /**
     * Serializes the {@code message}, prefixed with its length, into an {@link OutputStream}.
     *
     * @return the size of the message
     */
    public static int writeDelimitedTo(OutputStream out, Object message, Schema schema, LinkedBuffer buffer)
            throws IOException
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);
        schema.writeTo(output, message);
        final int size = output.getSize();
        // length prefix is written straight to the stream, then the payload is flushed
        ProtobufOutput.writeRawVarInt32Bytes(out, size);
        LinkedBuffer.writeTo(out, buffer);
        // final int msgSize = LinkedBuffer.writeTo(out, buffer);
        // assert size == msgSize;
        return size;
    }

    /**
     * Used by the code generated messages that implement {@link java.io.Externalizable}. Writes to the
     * {@link DataOutput} .
     *
     * @return the size of the message.
     */
    public static int writeDelimitedTo(DataOutput out, Object message, Schema schema) throws IOException
    {
        // allocates a fresh minimal buffer per call (no caller-supplied buffer here)
        final LinkedBuffer buffer = new LinkedBuffer(LinkedBuffer.MIN_BUFFER_SIZE);
        final ProtobufOutput output = new ProtobufOutput(buffer);
        schema.writeTo(output, message);
        final int size = output.getSize();
        ProtobufOutput.writeRawVarInt32Bytes(out, size);
        LinkedBuffer.writeTo(out, buffer);
        // final int msgSize = LinkedBuffer.writeTo(out, buffer);
        // assert size == msgSize;
        return size;
    }

    /**
     * Serializes the {@code messages} (delimited) into an {@link OutputStream} using the given schema.
     *
     * @return the total size of the messages (excluding the length prefix varint)
     */
    public static int writeListTo(OutputStream out, Vector messages, Schema schema, LinkedBuffer buffer)
            throws IOException
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);
        int totalSize = 0;
        for (int i = 0; i < messages.size(); i++)
        {
            Object m = messages.elementAt(i);
            schema.writeTo(output, m);
            final int size = output.getSize();
            ProtobufOutput.writeRawVarInt32Bytes(out, size);
            LinkedBuffer.writeTo(out, buffer);
            // final int msgSize = LinkedBuffer.writeTo(out, buffer);
            // assert size == msgSize;
            totalSize += size;
            // reset the output so the buffer can be reused for the next message
            output.clear();
        }
        return totalSize;
    }

    /**
     * Parses the {@code messages} (delimited) from the {@link InputStream} using the given {@code schema}.
     *
     * @return the list containing the messages.
     */
    public static Vector parseListFrom(InputStream in, Schema schema) throws IOException
    {
        final Vector list = new Vector();
        byte[] buf = null;
        int biggestLen = 0;
        LimitedInputStream lin = null;
        // each message is prefixed with a varint length; the first prefix byte doubles
        // as the whole length when it is < 0x80 (single-byte varint)
        for (int size = in.read(); size != -1; size = in.read())
        {
            final Object message = schema.newMessage();
            list.addElement(message);
            final int len = size < 0x80 ? size : CodedInput.readRawVarint32(in, size);
            if (len != 0)
            {
                // not an empty message
                if (len > CodedInput.DEFAULT_BUFFER_SIZE)
                {
                    // message too big
                    if (lin == null)
                        lin = new LimitedInputStream(in);
                    final CodedInput input = new CodedInput(lin.limit(len), false);
                    schema.mergeFrom(input, message);
                    input.checkLastTagWas(0);
                    continue;
                }

                if (biggestLen < len)
                {
                    // cannot reuse buffer, allocate a bigger buffer
                    // discard the last one for gc
                    buf = new byte[len];
                    biggestLen = len;
                }
                IOUtil.fillBufferFrom(in, buf, 0, len);
                final ByteArrayInput input = new ByteArrayInput(buf, 0, len, false);
                try
                {
                    schema.mergeFrom(input, message);
                }
                catch (ArrayIndexOutOfBoundsException e)
                {
                    // a read past the buffer end means the stream delivered a short message
                    throw ProtobufException.truncatedMessage(e);
                }
                input.checkLastTagWas(0);
            }
        }
        return list;
    }

    /**
     * Optimal/Optional mergeDelimitedFrom - If the message does not fit the buffer, no merge is done and this method
     * will return false.
     * <p>
     * This is strictly for reading a single message from the stream because the buffer is aggressively filled when
     * reading the delimited size (which could result into reading more bytes than it has to).
     * <p>
     * The remaining bytes will be drained (consumed and discared) when the message is too large.
     */
    public static boolean optMergeDelimitedFrom(InputStream in, Object message, Schema schema,
            LinkedBuffer buffer) throws IOException
    {
        return optMergeDelimitedFrom(in, message, schema, true, buffer);
    }

    /**
     * Optimal/Optional mergeDelimitedFrom - If the message does not fit the buffer, no merge is done and this method
     * will return false.
     * <p>
     * This is strictly for reading a single message from the stream because the buffer is aggressively filled when
     * reading the delimited size (which could result into reading more bytes than it has to).
     */
    public static boolean optMergeDelimitedFrom(InputStream in, Object message, Schema schema,
            boolean drainRemainingBytesIfTooLarge, LinkedBuffer buffer) throws IOException
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final int size = IOUtil.fillBufferWithDelimitedMessageFrom(in,
                drainRemainingBytesIfTooLarge, buffer);

        if (size == 0)
        {
            // empty message
            return true;
        }

        if (buffer.start == buffer.offset)
        {
            // read offset not set ... message too large
            return false;
        }

        final ByteArrayInput input = new ByteArrayInput(buffer.buffer,
                buffer.offset, size, false);
        try
        {
            schema.mergeFrom(input, message);
            input.checkLastTagWas(0);
        }
        catch (ArrayIndexOutOfBoundsException e)
        {
            throw ProtobufException.truncatedMessage(e);
        }
        finally
        {
            // reset
            buffer.offset = buffer.start;
        }
        return true;
    }

    /**
     * Optimal writeDelimitedTo - The varint32 prefix is written to the buffer instead of directly writing to
     * outputstream.
     *
     * @return the size of the message
     */
    public static int optWriteDelimitedTo(OutputStream out, Object message,
            Schema schema, LinkedBuffer buffer) throws IOException
    {
        if (buffer.start != buffer.offset)
            throw new IllegalArgumentException("Buffer previously used and had not been reset.");

        final ProtobufOutput output = new ProtobufOutput(buffer);

        // leave space for varint32 (5 bytes is the maximum encoded size of a varint32)
        buffer.offset = buffer.start + 5;
        output.size += 5;

        schema.writeTo(output, message);

        final int size = output.size - 5;

        // backfill the length prefix just before the payload; delimOffset is where
        // the prefix actually begins (the prefix may need fewer than 5 bytes)
        final int delimOffset = IOUtil.putVarInt32AndGetOffset(size, buffer.buffer,
                buffer.start);

        // write to stream
        out.write(buffer.buffer, delimOffset, buffer.offset - delimOffset);

        // flush remaining
        if (buffer.next != null)
            LinkedBuffer.writeTo(out, buffer.next);

        return size;
    }

}
package com.ggstudios.core; import com.ggstudios.flappyword2.MainApplication; import com.ggstudios.flappyword2.R; import android.app.Activity; import android.graphics.Color; import android.graphics.Paint; import android.util.Log; import android.view.MotionEvent; public class Game { private static final String TAG = Game.class.getSimpleName(); public static final int STATE_CLEAN_START = 1, STATE_SAVED = 2, STATE_KILLED = 3; private int state; private boolean isGlDataLoaded = false; Player player; private Ground ground; private ObstacleManager om; private DrawableString score; private GameOverScreen gos; public Game() { state = STATE_CLEAN_START; Core.tm = new TextureManager(); Core.fm = new FontManager(); Core.grid = new Grid(); Core.drawables = new DrawableCollection<Drawable>(); Core.clickables = new ClickableCollection(); Core.gu = new GameUpdater(); Core.gu.start(); } public void onSurfaceCreated() { if (!isGlDataLoaded) { loadGlData(); } switch (state) { case STATE_CLEAN_START: setupScreen(); state = STATE_SAVED; break; default: break; } } /** * Do all first time SIZE INDEPENDENT GL initialization here. * * All screen size dependent GL loading should be done in * {@link #setupScreen()} */ public void loadGlData() { Log.d(TAG, "loadGlData()"); Core.tm.loadGameTextures(); isGlDataLoaded = true; } /** * GL data is guaranteed to be setup by this point. * * Do all SIZE DEPENDANT initialization here... This will be called whenever * canvas size changes... */ private void setupScreen() { Log.d(TAG, "setupScreen()"); isOver = false; Core.drawables.clear(); Core.clickables.clear(); Core.gu.clearGameUpdatables(); Core.gu.clearUiUpdatables(); Core.fm.generateFont(Core.SDP_H * 0.9f); // reset screen state... 
Core.offX = 0; Core.offY = 0; Paint p = new Paint(); p.setColor(0xFF000000); p.setAntiAlias(true); p.setTypeface(MainApplication.get().getThemeFont()); p.setTextSize(Core.SDP * 1.2f); player = new Player(0, 0, p); player.setLocation((Core.canvasWidth - player.w) / 2, (Core.canvasHeight - player.h) / 2); Paint p2 = new Paint(p); p2.setTextSize(Core.SDP * 2f); speed = 0; float speed = Core.SDP * 6f; ground = new Ground(Core.canvasWidth, Core.canvasHeight / 6f, speed); om = new ObstacleManager(p2, Core.SDP * 4.5f, 1.8f, 0, Core.canvasHeight - (Core.canvasHeight / 6f), speed); gos = new GameOverScreen(); score = new DrawableString(Core.SDP, Core.SDP, Core.fm, "Score: "); DrawableString hscore = new DrawableString(Core.SDP, Core.SDP + Core.fm.getFontSize(), Core.fm, "High Score: " + Core.context.getSharedPreferences("pref", 0).getInt("high_score", 0)); Core.drawables.addDrawable(gos); Core.clickables.addClickable(gos); Core.drawables.addDrawable(score); Core.drawables.addDrawable(hscore); Core.drawables.addDrawable(ground); Core.drawables.addDrawable(om); Core.drawables.addDrawable(player); Core.gu.addGameUpdatable(gameLoop); TERMINAL_VELOCITY = Core.SDP * 6f; accel = Core.SDP_H * 0.02f; } private static float TERMINAL_VELOCITY; float accel = 0f; float speed = 0f; float fallTime = 0f; Updatable gameLoop = new Updatable() { @Override public boolean update(float dt) { fallTime += dt; // gravity... 
if (player.y + player.h <= Core.canvasWidth) { player.offsetY(speed * Core.SDP); speed += accel * dt; if (speed > TERMINAL_VELOCITY) { speed = TERMINAL_VELOCITY; } checkIfDead(); } score.setText("Score: " + player.score); ground.update(dt); om.update(dt); return true; } }; public boolean isOver = false; protected void checkIfDead() { if(ground.intersect(player.rect) || om.intersect(player.rect)) { dead(); } } private void dead() { isOver = true; gos.show(player.score); if(Core.game.getScore() > Core.context.getSharedPreferences("pref", 0).getInt("high_score", 0)) { Core.context.getSharedPreferences("pref", 0).edit().putInt("high_score", Core.game.getScore()).commit(); MainApplication.get().setHighScore(Core.game.getScore()); } } public void notifySurfaceChanged() { setupScreen(); } public int getState() { return state; } public void restart() { Core.glView.queueEvent(new Runnable() { @Override public void run() { setupScreen(); } }); } public void restarted() { // TODO Auto-generated method stub } public void refresh() { Log.d(TAG, "refresh()"); Core.fm.refresh(); Core.drawables.refresh(); } public void onPause() { Core.gu.pause(); } public void onResume() { Core.gu.unpause(); } public void onTouchEvent(MotionEvent event) { float eX = event.getX(); float eY = event.getY(); switch (event.getAction()) { case MotionEvent.ACTION_DOWN: { if (!isOver) speed = -Core.SDP_H * 0.006f; break; } } Core.clickables.onTouchEvent(event.getAction(), eX, eY); } public int getScore() { return player.score; } public void onDestroy() { Core.gu.gameDone(); } public void incrementScore() { player.score++; } }
/**
 * Copyright (C) 2009 - 2013 SC 4ViewSoft SRL
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.achartengine.chart;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;

import org.achartengine.model.Point;
import org.achartengine.model.SeriesSelection;
import org.achartengine.model.XYMultipleSeriesDataset;
import org.achartengine.model.XYSeries;
import org.achartengine.renderer.BasicStroke;
import org.achartengine.renderer.DefaultRenderer;
import org.achartengine.renderer.SimpleSeriesRenderer;
import org.achartengine.renderer.XYMultipleSeriesRenderer;
import org.achartengine.renderer.XYMultipleSeriesRenderer.Orientation;
import org.achartengine.renderer.XYSeriesRenderer;
import org.achartengine.util.MathHelper;

import android.graphics.Canvas;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Cap;
import android.graphics.Paint.Join;
import android.graphics.Paint.Style;
import android.graphics.PathEffect;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;

/**
 * The XY chart rendering class.
 */
public abstract class XYChart extends AbstractChart {
  /** The multiple series dataset. */
  protected XYMultipleSeriesDataset mDataset;
  /** The multiple series renderer. */
  protected XYMultipleSeriesRenderer mRenderer;
  /** The current scale value (set in draw(); height/width ratio used when rotating). */
  private float mScale;
  /** The current translate value (set in draw(); half the |width-height| gap). */
  private float mTranslate;
  /** The canvas center point. */
  private Point mCenter;
  /** The visible chart area, in screen coordinates. */
  private Rect mScreenR;
  /** The calculated range, keyed by scale number: [minX, maxX, minY, maxY]. */
  private final Map<Integer, double[]> mCalcRange = new HashMap<Integer, double[]>();

  /**
   * The clickable areas for all points. The array index is the series index, and the RectF list
   * index is the point index in that series.
   */
  private Map<Integer, List<ClickableArea>> clickableAreas = new HashMap<Integer, List<ClickableArea>>();

  // no-arg constructor for subclasses that set dataset/renderer later
  protected XYChart() {
  }

  /**
   * Builds a new XY chart instance.
   *
   * @param dataset the multiple series dataset
   * @param renderer the multiple series renderer
   */
  public XYChart(XYMultipleSeriesDataset dataset, XYMultipleSeriesRenderer renderer) {
    mDataset = dataset;
    mRenderer = renderer;
  }

  /**
   * Replaces the dataset and renderer used by this chart.
   *
   * @param dataset the multiple series dataset
   * @param renderer the multiple series renderer
   */
  protected void setDatasetRenderer(XYMultipleSeriesDataset dataset,
      XYMultipleSeriesRenderer renderer) {
    mDataset = dataset;
    mRenderer = renderer;
  }

  /**
   * The graphical representation of the XY chart.
* * @param canvas the canvas to paint to * @param x the top left x value of the view to draw to * @param y the top left y value of the view to draw to * @param width the width of the view to draw to * @param height the height of the view to draw to * @param paint the paint */ public void draw(Canvas canvas, int x, int y, int width, int height, Paint paint) { paint.setAntiAlias(mRenderer.isAntialiasing()); int legendSize = getLegendSize(mRenderer, height / 5, mRenderer.getAxisTitleTextSize()); int[] margins = mRenderer.getMargins(); int left = x + margins[1]; int top = y + margins[0]; int right = x + width - margins[3]; int sLength = mDataset.getSeriesCount(); String[] titles = new String[sLength]; for (int i = 0; i < sLength; i++) { titles[i] = mDataset.getSeriesAt(i).getTitle(); } if (mRenderer.isFitLegend() && mRenderer.isShowLegend()) { legendSize = drawLegend(canvas, mRenderer, titles, left, right, y, width, height, legendSize, paint, true); } int bottom = y + height - margins[2] - legendSize; if (mScreenR == null) { mScreenR = new Rect(); } mScreenR.set(left, top, right, bottom); drawBackground(mRenderer, canvas, x, y, width, height, paint, false, DefaultRenderer.NO_COLOR); if (paint.getTypeface() == null || (mRenderer.getTextTypeface() != null && paint.getTypeface().equals( mRenderer.getTextTypeface())) || !paint.getTypeface().toString().equals(mRenderer.getTextTypefaceName()) || paint.getTypeface().getStyle() != mRenderer.getTextTypefaceStyle()) { if (mRenderer.getTextTypeface() != null) { paint.setTypeface(mRenderer.getTextTypeface()); } else { paint.setTypeface(Typeface.create(mRenderer.getTextTypefaceName(), mRenderer.getTextTypefaceStyle())); } } Orientation or = mRenderer.getOrientation(); if (or == Orientation.VERTICAL) { right -= legendSize; bottom += legendSize - 20; } int angle = or.getAngle(); boolean rotate = angle == 90; mScale = (float) (height) / width; mTranslate = Math.abs(width - height) / 2; if (mScale < 1) { mTranslate *= -1; } mCenter = 
new Point((x + width) / 2, (y + height) / 2); if (rotate) { transform(canvas, angle, false); } int maxScaleNumber = -Integer.MAX_VALUE; for (int i = 0; i < sLength; i++) { maxScaleNumber = Math.max(maxScaleNumber, mDataset.getSeriesAt(i).getScaleNumber()); } maxScaleNumber++; if (maxScaleNumber < 0) { return; } double[] minX = new double[maxScaleNumber]; double[] maxX = new double[maxScaleNumber]; double[] minY = new double[maxScaleNumber]; double[] maxY = new double[maxScaleNumber]; boolean[] isMinXSet = new boolean[maxScaleNumber]; boolean[] isMaxXSet = new boolean[maxScaleNumber]; boolean[] isMinYSet = new boolean[maxScaleNumber]; boolean[] isMaxYSet = new boolean[maxScaleNumber]; for (int i = 0; i < maxScaleNumber; i++) { minX[i] = mRenderer.getXAxisMin(i); maxX[i] = mRenderer.getXAxisMax(i); minY[i] = mRenderer.getYAxisMin(i); maxY[i] = mRenderer.getYAxisMax(i); isMinXSet[i] = mRenderer.isMinXSet(i); isMaxXSet[i] = mRenderer.isMaxXSet(i); isMinYSet[i] = mRenderer.isMinYSet(i); isMaxYSet[i] = mRenderer.isMaxYSet(i); if (mCalcRange.get(i) == null) { mCalcRange.put(i, new double[4]); } } double[] xPixelsPerUnit = new double[maxScaleNumber]; double[] yPixelsPerUnit = new double[maxScaleNumber]; for (int i = 0; i < sLength; i++) { XYSeries series = mDataset.getSeriesAt(i); int scale = series.getScaleNumber(); if (series.getItemCount() == 0) { continue; } if (!isMinXSet[scale]) { double minimumX = series.getMinX(); minX[scale] = Math.min(minX[scale], minimumX); mCalcRange.get(scale)[0] = minX[scale]; } if (!isMaxXSet[scale]) { double maximumX = series.getMaxX(); maxX[scale] = Math.max(maxX[scale], maximumX); mCalcRange.get(scale)[1] = maxX[scale]; } if (!isMinYSet[scale]) { double minimumY = series.getMinY(); minY[scale] = Math.min(minY[scale], (float) minimumY); mCalcRange.get(scale)[2] = minY[scale]; } if (!isMaxYSet[scale]) { double maximumY = series.getMaxY(); maxY[scale] = Math.max(maxY[scale], (float) maximumY); mCalcRange.get(scale)[3] = maxY[scale]; } } for 
(int i = 0; i < maxScaleNumber; i++) { if (maxX[i] - minX[i] != 0) { xPixelsPerUnit[i] = (right - left) / (maxX[i] - minX[i]); } if (maxY[i] - minY[i] != 0) { yPixelsPerUnit[i] = (float) ((bottom - top) / (maxY[i] - minY[i])); } // the X axis on multiple scales was wrong without this fix if (i > 0) { xPixelsPerUnit[i] = xPixelsPerUnit[0]; minX[i] = minX[0]; maxX[i] = maxX[0]; } } boolean hasValues = false; // use a linked list for these reasons: // 1) Avoid a large contiguous memory allocation // 2) We don't need random seeking, only sequential reading/writing, so // linked list makes sense clickableAreas = new HashMap<Integer, List<ClickableArea>>(); for (int i = 0; i < sLength; i++) { XYSeries series = mDataset.getSeriesAt(i); int scale = series.getScaleNumber(); if (series.getItemCount() == 0) { continue; } hasValues = true; XYSeriesRenderer seriesRenderer = (XYSeriesRenderer) mRenderer.getSeriesRendererAt(i); // int originalValuesLength = series.getItemCount(); // int valuesLength = originalValuesLength; // int length = valuesLength * 2; List<Float> points = new ArrayList<Float>(); List<Double> values = new ArrayList<Double>(); float yAxisValue = Math.min(bottom, (float) (bottom + yPixelsPerUnit[scale] * minY[scale])); LinkedList<ClickableArea> clickableArea = new LinkedList<ClickableArea>(); clickableAreas.put(i, clickableArea); synchronized (series) { SortedMap<Double, Double> range = series.getRange(minX[scale], maxX[scale], seriesRenderer.isDisplayBoundingPoints()); int startIndex = -1; for (Entry<Double, Double> value : range.entrySet()) { double xValue = value.getKey(); double yValue = value.getValue(); if (startIndex < 0 && (!isNullValue(yValue) || isRenderNullValues())) { startIndex = series.getIndexForKey(xValue); } // points.add((float) (left + xPixelsPerUnit[scale] // * (value.getKey().floatValue() - minX[scale]))); // points.add((float) (bottom - yPixelsPerUnit[scale] // * (value.getValue().floatValue() - minY[scale]))); values.add(value.getKey()); 
values.add(value.getValue()); if (!isNullValue(yValue)) { points.add((float) (left + xPixelsPerUnit[scale] * (xValue - minX[scale]))); points.add((float) (bottom - yPixelsPerUnit[scale] * (yValue - minY[scale]))); } else if (isRenderNullValues()) { points.add((float) (left + xPixelsPerUnit[scale] * (xValue - minX[scale]))); points.add((float) (bottom - yPixelsPerUnit[scale] * (-minY[scale]))); } else { if (points.size() > 0) { drawSeries(series, canvas, paint, points, seriesRenderer, yAxisValue, i, or, startIndex); ClickableArea[] clickableAreasForSubSeries = clickableAreasForPoints(points, values, yAxisValue, i, startIndex); clickableArea.addAll(Arrays.asList(clickableAreasForSubSeries)); points.clear(); values.clear(); startIndex = -1; } clickableArea.add(null); } } int count = series.getAnnotationCount(); if (count > 0) { paint.setColor(seriesRenderer.getAnnotationsColor()); paint.setTextSize(seriesRenderer.getAnnotationsTextSize()); paint.setTextAlign(seriesRenderer.getAnnotationsTextAlign()); Rect bound = new Rect(); for (int j = 0; j < count; j++) { float xS = (float) (left + xPixelsPerUnit[scale] * (series.getAnnotationX(j) - minX[scale])); float yS = (float) (bottom - yPixelsPerUnit[scale] * (series.getAnnotationY(j) - minY[scale])); paint.getTextBounds(series.getAnnotationAt(j), 0, series.getAnnotationAt(j).length(), bound); if (xS < (xS + bound.width()) && yS < canvas.getHeight()) { drawString(canvas, series.getAnnotationAt(j), xS, yS, paint); } } } if (points.size() > 0) { drawSeries(series, canvas, paint, points, seriesRenderer, yAxisValue, i, or, startIndex); ClickableArea[] clickableAreasForSubSeries = clickableAreasForPoints(points, values, yAxisValue, i, startIndex); clickableArea.addAll(Arrays.asList(clickableAreasForSubSeries)); } } } // draw stuff over the margins such as data doesn't render on these areas drawBackground(mRenderer, canvas, x, bottom, width, height - bottom, paint, true, mRenderer.getMarginsColor()); drawBackground(mRenderer, 
canvas, x, y, width, margins[0], paint, true, mRenderer.getMarginsColor()); if (or == Orientation.HORIZONTAL) { drawBackground(mRenderer, canvas, x, y, left - x, height - y, paint, true, mRenderer.getMarginsColor()); drawBackground(mRenderer, canvas, right, y, margins[3], height - y, paint, true, mRenderer.getMarginsColor()); } else if (or == Orientation.VERTICAL) { drawBackground(mRenderer, canvas, right, y, width - right, height - y, paint, true, mRenderer.getMarginsColor()); drawBackground(mRenderer, canvas, x, y, left - x, height - y, paint, true, mRenderer.getMarginsColor()); } boolean showLabels = mRenderer.isShowLabels() && hasValues; boolean showGridX = mRenderer.isShowGridX(); // boolean showCustomTextGridX = mRenderer.isShowCustomTextGridX(); boolean showCustomTextGridY = mRenderer.isShowCustomTextGridY(); if (showLabels || showGridX) { List<Double> xLabels = getValidLabels(getXLabels(minX[0], maxX[0], mRenderer.getXLabels())); Map<Integer, List<Double>> allYLabels = getYLabels(minY, maxY, maxScaleNumber); int xLabelsLeft = left; if (showLabels) { paint.setColor(mRenderer.getXLabelsColor()); paint.setTextSize(mRenderer.getLabelsTextSize()); paint.setTextAlign(mRenderer.getXLabelsAlign()); // if (mRenderer.getXLabelsAlign() == Align.LEFT) { // xLabelsLeft += mRenderer.getLabelsTextSize() / 4; // } } drawXLabels(xLabels, mRenderer.getXTextLabelLocations(), canvas, paint, xLabelsLeft, top, bottom, xPixelsPerUnit[0], minX[0], maxX[0]); drawYLabels(allYLabels, canvas, paint, maxScaleNumber, left, right, bottom, yPixelsPerUnit, minY); if (showLabels) { paint.setColor(mRenderer.getLabelsColor()); for (int i = 0; i < maxScaleNumber; i++) { Align axisAlign = mRenderer.getYAxisAlign(i); Double[] yTextLabelLocations = mRenderer.getYTextLabelLocations(i); for (Double location : yTextLabelLocations) { if (minY[i] <= location && location <= maxY[i]) { float yLabel = (float) (bottom - yPixelsPerUnit[i] * (location.doubleValue() - minY[i])); String label = 
mRenderer.getYTextLabel(location, i); paint.setColor(mRenderer.getYLabelsColor(i)); paint.setTextAlign(mRenderer.getYLabelsAlign(i)); if (or == Orientation.HORIZONTAL) { if (axisAlign == Align.LEFT) { canvas.drawLine(left + getLabelLinePos(axisAlign), yLabel, left, yLabel, paint); drawText(canvas, label, left, yLabel - mRenderer.getYLabelsVerticalPadding(), paint, mRenderer.getYLabelsAngle()); } else { canvas.drawLine(right, yLabel, right + getLabelLinePos(axisAlign), yLabel, paint); drawText(canvas, label, right, yLabel - mRenderer.getYLabelsVerticalPadding(), paint, mRenderer.getYLabelsAngle()); } if (showCustomTextGridY) { paint.setColor(mRenderer.getGridColor(i)); canvas.drawLine(left, yLabel, right, yLabel, paint); } } else { canvas.drawLine(right - getLabelLinePos(axisAlign), yLabel, right, yLabel, paint); drawText(canvas, label, right + 10, yLabel - mRenderer.getYLabelsVerticalPadding(), paint, mRenderer.getYLabelsAngle()); if (showCustomTextGridY) { paint.setColor(mRenderer.getGridColor(i)); canvas.drawLine(right, yLabel, left, yLabel, paint); } } } } } } if (showLabels) { paint.setColor(mRenderer.getLabelsColor()); float size = mRenderer.getAxisTitleTextSize(); paint.setTextSize(size); paint.setTextAlign(Align.CENTER); if (or == Orientation.HORIZONTAL) { drawText( canvas, mRenderer.getXTitle(), x + width / 2, bottom + mRenderer.getLabelsTextSize() * 4 / 3 + mRenderer.getXLabelsPadding() + size, paint, 0); for (int i = 0; i < maxScaleNumber; i++) { Align axisAlign = mRenderer.getYAxisAlign(i); if (axisAlign == Align.LEFT) { drawText(canvas, mRenderer.getYTitle(i), x + size, y + height / 2, paint, -90); } else { drawText(canvas, mRenderer.getYTitle(i), x + width, y + height / 2, paint, -90); } } paint.setTextSize(mRenderer.getChartTitleTextSize()); drawText(canvas, mRenderer.getChartTitle(), x + width / 2, y + mRenderer.getChartTitleTextSize(), paint, 0); } else if (or == Orientation.VERTICAL) { drawText(canvas, mRenderer.getXTitle(), x + width / 2, y + 
height - size + mRenderer.getXLabelsPadding(), paint, -90);
      // NOTE(review): tail of the main draw method, whose start is above this excerpt.
      drawText(canvas, mRenderer.getYTitle(), right + 20, y + height / 2, paint, 0);
      paint.setTextSize(mRenderer.getChartTitleTextSize());
      drawText(canvas, mRenderer.getChartTitle(), x + size, top + height / 2, paint, 0);
    }
  }
  // Draw the legend; in VERTICAL orientation the canvas is temporarily rotated
  // so the legend is rendered upright, then the transform is undone.
  if (or == Orientation.HORIZONTAL) {
    drawLegend(canvas, mRenderer, titles, left, right, y + (int) mRenderer.getXLabelsPadding(),
        width, height, legendSize, paint, false);
  } else if (or == Orientation.VERTICAL) {
    transform(canvas, angle, true);
    drawLegend(canvas, mRenderer, titles, left, right, y + (int) mRenderer.getXLabelsPadding(),
        width, height, legendSize, paint, false);
    transform(canvas, angle, false);
  }
  // Draw the axis lines: always the X axis; the left Y axis in HORIZONTAL mode,
  // plus a right Y axis if any scale is right-aligned.
  if (mRenderer.isShowAxes()) {
    paint.setColor(mRenderer.getAxesColor());
    canvas.drawLine(left, bottom, right, bottom, paint);
    boolean rightAxis = false;
    for (int i = 0; i < maxScaleNumber && !rightAxis; i++) {
      rightAxis = mRenderer.getYAxisAlign(i) == Align.RIGHT;
    }
    if (or == Orientation.HORIZONTAL) {
      canvas.drawLine(left, top, left, bottom, paint);
      if (rightAxis) {
        canvas.drawLine(right, top, right, bottom, paint);
      }
    } else if (or == Orientation.VERTICAL) {
      canvas.drawLine(right, top, right, bottom, paint);
    }
  }
  if (rotate) {
    transform(canvas, angle, true);
  }
}

/**
 * Computes the X axis label values for the given range.
 *
 * @param min the minimum X value
 * @param max the maximum X value
 * @param count the requested number of labels
 * @return the label values
 */
protected List<Double> getXLabels(double min, double max, int count) {
  return MathHelper.getLabels(min, max, count);
}

/**
 * Computes the Y axis label values for every scale, dropping NaN entries.
 *
 * @param minY per-scale minimum Y values
 * @param maxY per-scale maximum Y values
 * @param maxScaleNumber the number of scales
 * @return a map from scale index to its valid label values
 */
protected Map<Integer, List<Double>> getYLabels(double[] minY, double[] maxY, int maxScaleNumber) {
  Map<Integer, List<Double>> allYLabels = new HashMap<Integer, List<Double>>();
  for (int i = 0; i < maxScaleNumber; i++) {
    allYLabels.put(i, getValidLabels(MathHelper.getLabels(minY[i], maxY[i], mRenderer.getYLabels())));
  }
  return allYLabels;
}

/** Returns the cached screen rectangle used for coordinate transforms. */
protected Rect getScreenR() {
  return mScreenR;
}

/** Stores the screen rectangle used for coordinate transforms. */
protected void setScreenR(Rect screenR) {
  mScreenR = screenR;
}

/**
 * Returns a copy of the labels list with all NaN values removed.
 * Removal is done on the copy, so iterating the source list is safe.
 */
private List<Double> getValidLabels(List<Double> labels) {
  List<Double> result = new ArrayList<Double>(labels);
  for (Double label : labels) {
    if (label.isNaN()) {
result.remove(label);
      // tail of getValidLabels(): drop the NaN label from the copied list
    }
  }
  return result;
}

/**
 * Draws the series: its line/shape rendering, optional data points and
 * optional per-point value labels. The paint's stroke settings are saved
 * up front and restored at the end when a custom stroke was applied.
 *
 * @param series the series
 * @param canvas the canvas
 * @param paint the paint object
 * @param pointsList the points to be rendered
 * @param seriesRenderer the series renderer
 * @param yAxisValue the y axis value in pixels
 * @param seriesIndex the series index
 * @param or the orientation
 * @param startIndex the start index of the rendering points
 */
protected void drawSeries(XYSeries series, Canvas canvas, Paint paint, List<Float> pointsList,
    XYSeriesRenderer seriesRenderer, float yAxisValue, int seriesIndex, Orientation or,
    int startIndex) {
  BasicStroke stroke = seriesRenderer.getStroke();
  // Remember the incoming paint state so it can be restored after drawing.
  Cap cap = paint.getStrokeCap();
  Join join = paint.getStrokeJoin();
  float miter = paint.getStrokeMiter();
  PathEffect pathEffect = paint.getPathEffect();
  Style style = paint.getStyle();
  if (stroke != null) {
    PathEffect effect = null;
    if (stroke.getIntervals() != null) {
      effect = new DashPathEffect(stroke.getIntervals(), stroke.getPhase());
    }
    setStroke(stroke.getCap(), stroke.getJoin(), stroke.getMiter(), Style.FILL_AND_STROKE,
        effect, paint);
  }
  // float[] points = MathHelper.getFloats(pointsList);
  // Delegate the actual geometry to the chart-type-specific implementation.
  drawSeries(canvas, paint, pointsList, seriesRenderer, yAxisValue, seriesIndex, startIndex);
  if (isRenderPoints(seriesRenderer)) {
    ScatterChart pointsChart = getPointsChart();
    if (pointsChart != null) {
      pointsChart.drawSeries(canvas, paint, pointsList, seriesRenderer, yAxisValue, seriesIndex,
          startIndex);
    }
  }
  paint.setTextSize(seriesRenderer.getChartValuesTextSize());
  if (or == Orientation.HORIZONTAL) {
    paint.setTextAlign(Align.CENTER);
  } else {
    paint.setTextAlign(Align.LEFT);
  }
  if (seriesRenderer.isDisplayChartValues()) {
    paint.setTextAlign(seriesRenderer.getChartValuesTextAlign());
    drawChartValuesText(canvas, series, seriesRenderer, paint, pointsList, seriesIndex, startIndex);
  }
  if (stroke != null) {
    // Restore the paint to the state captured above.
    setStroke(cap, join, miter, style, pathEffect, paint);
  }
}

/** Applies the given stroke attributes to the paint. */
private void setStroke(Cap cap, Join join, float miter, Style style, PathEffect pathEffect,
    Paint paint) {
  paint.setStrokeCap(cap);
  paint.setStrokeJoin(join);
  paint.setStrokeMiter(miter);
  paint.setPathEffect(pathEffect);
  paint.setStyle(style);
}

/**
 * The graphical representation of the series values as text. A value label
 * is only drawn when the point is farther than the renderer's display
 * distance from the previously labeled point, to avoid overlapping text.
 *
 * @param canvas the canvas to paint to
 * @param series the series to be painted
 * @param renderer the series renderer
 * @param paint the paint to be used for drawing
 * @param points the array of points to be used for drawing the series
 * @param seriesIndex the index of the series currently being drawn
 * @param startIndex the start index of the rendering points
 */
protected void drawChartValuesText(Canvas canvas, XYSeries series, XYSeriesRenderer renderer,
    Paint paint, List<Float> points, int seriesIndex, int startIndex) {
  if (points.size() > 1) { // there are more than one point
    // record the first point's position
    float previousPointX = points.get(0);
    float previousPointY = points.get(1);
    for (int k = 0; k < points.size(); k += 2) {
      if (k == 2) {
        // decide whether to display first two points' values or not
        if (Math.abs(points.get(2) - points.get(0)) > renderer.getDisplayChartValuesDistance()
            || Math.abs(points.get(3) - points.get(1)) > renderer.getDisplayChartValuesDistance()) {
          // first point
          drawText(canvas, getLabel(renderer.getChartValuesFormat(), series.getY(startIndex)),
              points.get(0), points.get(1) - renderer.getChartValuesSpacing(), paint, 0);
          // second point
          drawText(canvas, getLabel(renderer.getChartValuesFormat(), series.getY(startIndex + 1)),
              points.get(2), points.get(3) - renderer.getChartValuesSpacing(), paint, 0);
          previousPointX = points.get(2);
          previousPointY = points.get(3);
        }
      } else if (k > 2) {
        // compare current point's position with the previous point's, if they
        // are not too close, display
        if (Math.abs(points.get(k) - previousPointX) > renderer.getDisplayChartValuesDistance()
            || Math.abs(points.get(k + 1) - previousPointY) > renderer
                .getDisplayChartValuesDistance()) {
          drawText(canvas,
              getLabel(renderer.getChartValuesFormat(), series.getY(startIndex + k / 2)),
              points.get(k), points.get(k + 1) - renderer.getChartValuesSpacing(), paint, 0);
          previousPointX = points.get(k);
          previousPointY = points.get(k + 1);
        }
      }
    }
  } else { // if only one point, display it
    for (int k = 0; k < points.size(); k += 2) {
      drawText(canvas, getLabel(renderer.getChartValuesFormat(), series.getY(startIndex + k / 2)),
          points.get(k), points.get(k + 1) - renderer.getChartValuesSpacing(), paint, 0);
    }
  }
}

/**
 * The graphical representation of a text, to handle both HORIZONTAL and
 * VERTICAL orientations and extra rotation angles. The canvas is rotated
 * around (x, y) before drawing and rotated back afterwards.
 *
 * @param canvas the canvas to paint to
 * @param text the text to be rendered
 * @param x the X axis location of the text
 * @param y the Y axis location of the text
 * @param paint the paint to be used for drawing
 * @param extraAngle the text angle
 */
protected void drawText(Canvas canvas, String text, float x, float y, Paint paint,
    float extraAngle) {
  float angle = -mRenderer.getOrientation().getAngle() + extraAngle;
  if (angle != 0) {
    // canvas.scale(1 / mScale, mScale);
    canvas.rotate(angle, x, y);
  }
  drawString(canvas, text, x, y, paint);
  if (angle != 0) {
    canvas.rotate(-angle, x, y);
    // canvas.scale(mScale, 1 / mScale);
  }
}

/**
 * Transform the canvas such as it can handle both HORIZONTAL and VERTICAL
 * orientations. The inverse branch applies the exact opposite operations in
 * the opposite order, so a true/false pair leaves the canvas unchanged.
 *
 * @param canvas the canvas to paint to
 * @param angle the angle of rotation
 * @param inverse if the inverse transform needs to be applied
 */
private void transform(Canvas canvas, float angle, boolean inverse) {
  if (inverse) {
    canvas.scale(1 / mScale, mScale);
    canvas.translate(mTranslate, -mTranslate);
    canvas.rotate(-angle, mCenter.getX(), mCenter.getY());
  } else {
    canvas.rotate(angle, mCenter.getX(), mCenter.getY());
    canvas.translate(-mTranslate, mTranslate);
    canvas.scale(mScale, 1 / mScale);
  }
}

/**
 * The graphical representation of the labels on the X axis.
 *
 * @param xLabels the X labels values
 * @param xTextLabelLocations the X text label locations
 * @param canvas the canvas to paint to
 * @param paint the paint to be used for drawing
 * @param left the left value of the labels area
 * @param top the top value of the labels area
 * @param bottom the bottom value of the labels area
 * @param xPixelsPerUnit the amount of pixels per one unit in the chart labels
 * @param minX the minimum value on the X axis in the chart
 * @param maxX the maximum value on the X axis in the chart
 */
protected void drawXLabels(List<Double> xLabels, Double[] xTextLabelLocations, Canvas canvas,
    Paint paint, int left, int top, int bottom, double xPixelsPerUnit, double minX, double maxX) {
  int length = xLabels.size();
  boolean showLabels = mRenderer.isShowLabels();
  boolean showGridY = mRenderer.isShowGridY();
  for (int i = 0; i < length; i++) {
    double label = xLabels.get(i);
    // convert the data value to a pixel X position
    float xLabel = (float) (left + xPixelsPerUnit * (label - minX));
    if (showLabels) {
      paint.setColor(mRenderer.getXLabelsColor());
      // short tick mark below the axis, then the label text under it
      canvas.drawLine(xLabel, bottom, xLabel, bottom + mRenderer.getLabelsTextSize() / 3, paint);
      drawText(canvas, getLabel(mRenderer.getLabelFormat(), label), xLabel,
          bottom + mRenderer.getLabelsTextSize() * 4 / 3 + mRenderer.getXLabelsPadding(), paint,
          mRenderer.getXLabelsAngle());
    }
    if (showGridY) {
      paint.setColor(mRenderer.getGridColor(0));
      canvas.drawLine(xLabel, bottom, xLabel, top, paint);
    }
  }
  // custom text labels are drawn on top of / instead of the numeric labels
  drawXTextLabels(xTextLabelLocations, canvas, paint, showLabels, left, top, bottom,
      xPixelsPerUnit, minX, maxX);
}

/**
 * The graphical representation of the labels on the Y axis.
 *
 * @param allYLabels the Y labels values
 * @param canvas the canvas to paint to
 * @param paint the paint to be used for drawing
 * @param maxScaleNumber the maximum scale number
 * @param left the left value of the labels area
 * @param right the right value of the labels area
 * @param bottom the bottom value of the labels area
 * @param yPixelsPerUnit the amount of pixels per one unit in the chart labels
 * @param minY the minimum value on the Y axis in the chart
 */
protected void drawYLabels(Map<Integer, List<Double>> allYLabels, Canvas canvas, Paint paint,
    int maxScaleNumber, int left, int right, int bottom, double[] yPixelsPerUnit, double[] minY) {
  Orientation or = mRenderer.getOrientation();
  boolean showGridX = mRenderer.isShowGridX();
  boolean showLabels = mRenderer.isShowLabels();
  for (int i = 0; i < maxScaleNumber; i++) {
    paint.setTextAlign(mRenderer.getYLabelsAlign(i));
    List<Double> yLabels = allYLabels.get(i);
    int length = yLabels.size();
    for (int j = 0; j < length; j++) {
      double label = yLabels.get(j);
      Align axisAlign = mRenderer.getYAxisAlign(i);
      // skip the numeric label when a custom text label is defined at this value
      boolean textLabel = mRenderer.getYTextLabel(label, i) != null;
      float yLabel = (float) (bottom - yPixelsPerUnit[i] * (label - minY[i]));
      if (or == Orientation.HORIZONTAL) {
        if (showLabels && !textLabel) {
          paint.setColor(mRenderer.getYLabelsColor(i));
          if (axisAlign == Align.LEFT) {
            canvas.drawLine(left + getLabelLinePos(axisAlign), yLabel, left, yLabel, paint);
            drawText(canvas, getLabel(mRenderer.getLabelFormat(), label),
                left - mRenderer.getYLabelsPadding(),
                yLabel - mRenderer.getYLabelsVerticalPadding(), paint,
                mRenderer.getYLabelsAngle());
          } else {
            canvas.drawLine(right, yLabel, right + getLabelLinePos(axisAlign), yLabel, paint);
            drawText(canvas, getLabel(mRenderer.getLabelFormat(), label),
                right + mRenderer.getYLabelsPadding(),
                yLabel - mRenderer.getYLabelsVerticalPadding(), paint,
                mRenderer.getYLabelsAngle());
          }
        }
        if (showGridX) {
          paint.setColor(mRenderer.getGridColor(i));
          canvas.drawLine(left, yLabel, right, yLabel, paint);
        }
      } else if (or == Orientation.VERTICAL) {
        if (showLabels && !textLabel) {
          paint.setColor(mRenderer.getYLabelsColor(i));
          canvas.drawLine(right - getLabelLinePos(axisAlign), yLabel, right, yLabel, paint);
          drawText(canvas, getLabel(mRenderer.getLabelFormat(), label),
              right + 10 + mRenderer.getYLabelsPadding(),
              yLabel - mRenderer.getYLabelsVerticalPadding(), paint, mRenderer.getYLabelsAngle());
        }
        if (showGridX) {
          paint.setColor(mRenderer.getGridColor(i));
          canvas.drawLine(right, yLabel, left, yLabel, paint);
        }
      }
    }
  }
}

/**
 * The graphical representation of the text labels on the X axis. Only
 * locations falling inside [minX, maxX] are drawn.
 *
 * @param xTextLabelLocations the X text label locations
 * @param canvas the canvas to paint to
 * @param paint the paint to be used for drawing
 * @param showLabels if the labels should be rendered at all
 * @param left the left value of the labels area
 * @param top the top value of the labels area
 * @param bottom the bottom value of the labels area
 * @param xPixelsPerUnit the amount of pixels per one unit in the chart labels
 * @param minX the minimum value on the X axis in the chart
 * @param maxX the maximum value on the X axis in the chart
 */
protected void drawXTextLabels(Double[] xTextLabelLocations, Canvas canvas, Paint paint,
    boolean showLabels, int left, int top, int bottom, double xPixelsPerUnit, double minX,
    double maxX) {
  boolean showCustomTextGridX = mRenderer.isShowCustomTextGridX();
  if (showLabels) {
    paint.setColor(mRenderer.getXLabelsColor());
    for (Double location : xTextLabelLocations) {
      if (minX <= location && location <= maxX) {
        float xLabel = (float) (left + xPixelsPerUnit * (location.doubleValue() - minX));
        paint.setColor(mRenderer.getXLabelsColor());
        canvas
            .drawLine(xLabel, bottom, xLabel, bottom + mRenderer.getLabelsTextSize() / 3, paint);
        drawText(canvas, mRenderer.getXTextLabel(location), xLabel,
            bottom + mRenderer.getLabelsTextSize() * 4 / 3, paint, mRenderer.getXLabelsAngle());
        if (showCustomTextGridX) {
          paint.setColor(mRenderer.getGridColor(0));
          canvas.drawLine(xLabel, bottom, xLabel, top, paint);
        }
      }
    }
  }
} // TODO: docs public XYMultipleSeriesRenderer getRenderer() { return mRenderer; } public XYMultipleSeriesDataset getDataset() { return mDataset; } public double[] getCalcRange(int scale) { return mCalcRange.get(scale); } public void setCalcRange(double[] range, int scale) { mCalcRange.put(scale, range); } public double[] toRealPoint(float screenX, float screenY) { return toRealPoint(screenX, screenY, 0); } public double[] toScreenPoint(double[] realPoint) { return toScreenPoint(realPoint, 0); } private int getLabelLinePos(Align align) { int pos = 4; if (align == Align.LEFT) { pos = -pos; } return pos; } /** * Transforms a screen point to a real coordinates point. * * @param screenX the screen x axis value * @param screenY the screen y axis value * @return the real coordinates point */ public double[] toRealPoint(float screenX, float screenY, int scale) { double realMinX = mRenderer.getXAxisMin(scale); double realMaxX = mRenderer.getXAxisMax(scale); double realMinY = mRenderer.getYAxisMin(scale); double realMaxY = mRenderer.getYAxisMax(scale); if (!mRenderer.isMinXSet(scale) || !mRenderer.isMaxXSet(scale) || !mRenderer.isMinXSet(scale) || !mRenderer.isMaxYSet(scale)) { double[] calcRange = getCalcRange(scale); realMinX = calcRange[0]; realMaxX = calcRange[1]; realMinY = calcRange[2]; realMaxY = calcRange[3]; } if (mScreenR != null) { return new double[] { (screenX - mScreenR.left) * (realMaxX - realMinX) / mScreenR.width() + realMinX, (mScreenR.top + mScreenR.height() - screenY) * (realMaxY - realMinY) / mScreenR.height() + realMinY }; } else { return new double[] { screenX, screenY }; } } public double[] toScreenPoint(double[] realPoint, int scale) { double realMinX = mRenderer.getXAxisMin(scale); double realMaxX = mRenderer.getXAxisMax(scale); double realMinY = mRenderer.getYAxisMin(scale); double realMaxY = mRenderer.getYAxisMax(scale); if (!mRenderer.isMinXSet(scale) || !mRenderer.isMaxXSet(scale) || !mRenderer.isMinXSet(scale) || !mRenderer.isMaxYSet(scale)) 
{ double[] calcRange = getCalcRange(scale); realMinX = calcRange[0]; realMaxX = calcRange[1]; realMinY = calcRange[2]; realMaxY = calcRange[3]; } if (mScreenR != null) { return new double[] { (realPoint[0] - realMinX) * mScreenR.width() / (realMaxX - realMinX) + mScreenR.left, (realMaxY - realPoint[1]) * mScreenR.height() / (realMaxY - realMinY) + mScreenR.top }; } else { return realPoint; } } public SeriesSelection getSeriesAndPointForScreenCoordinate(final Point screenPoint) { if (clickableAreas != null) for (int seriesIndex = clickableAreas.size() - 1; seriesIndex >= 0; seriesIndex--) { // series 0 is drawn first. Then series 1 is drawn on top, and series 2 // on top of that. // we want to know what the user clicked on, so traverse them in the // order they appear on the screen. int pointIndex = 0; if (clickableAreas.get(seriesIndex) != null) { RectF rectangle; for (ClickableArea area : clickableAreas.get(seriesIndex)) { if (area != null) { rectangle = area.getRect(); if (rectangle != null && rectangle.contains(screenPoint.getX(), screenPoint.getY())) { return new SeriesSelection(seriesIndex, pointIndex, area.getX(), area.getY()); } } pointIndex++; } } } return super.getSeriesAndPointForScreenCoordinate(screenPoint); } /** * The graphical representation of a series. 
 *
 * @param canvas the canvas to paint to
 * @param paint the paint to be used for drawing
 * @param points the array of points to be used for drawing the series
 * @param seriesRenderer the series renderer
 * @param yAxisValue the minimum value of the y axis
 * @param seriesIndex the index of the series currently being drawn
 * @param startIndex the start index of the rendering points
 */
public abstract void drawSeries(Canvas canvas, Paint paint, List<Float> points,
    XYSeriesRenderer seriesRenderer, float yAxisValue, int seriesIndex, int startIndex);

/**
 * Returns the clickable areas for all passed points
 *
 * @param points the array of points
 * @param values the array of values of each point
 * @param yAxisValue the minimum value of the y axis
 * @param seriesIndex the index of the series to which the points belong
 * @return an array of rectangles with the clickable area
 * @param startIndex the start index of the rendering points
 */
protected abstract ClickableArea[] clickableAreasForPoints(List<Float> points,
    List<Double> values, float yAxisValue, int seriesIndex, int startIndex);

/**
 * Returns if the chart should display the null values.
 * Default: null values are skipped; subclasses may override.
 *
 * @return if null values should be rendered
 */
protected boolean isRenderNullValues() {
  return false;
}

/**
 * Returns if the chart should display the points as a certain shape.
 * Default: no point shapes; subclasses (e.g. line charts) may override.
 *
 * @param renderer the series renderer
 */
public boolean isRenderPoints(SimpleSeriesRenderer renderer) {
  return false;
}

/**
 * Returns the default axis minimum.
 *
 * @return the default axis minimum
 */
public double getDefaultMinimum() {
  return MathHelper.NULL_VALUE;
}

/**
 * Returns the scatter chart to be used for drawing the data points.
 * Default: none; subclasses that render points provide one.
 *
 * @return the data points scatter chart
 */
public ScatterChart getPointsChart() {
  return null;
}

/**
 * Returns the chart type identifier.
 *
 * @return the chart type
 */
public abstract String getChartType();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.kstream.internals;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.ForeachAction;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Serialized;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.TimeWindowedKStream;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.test.KStreamTestDriver;
import org.apache.kafka.test.MockAggregator;
import org.apache.kafka.test.MockInitializer;
import org.apache.kafka.test.MockReducer;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Tests for {@code TimeWindowedKStreamImpl}: count/reduce/aggregate over
 * 500ms tumbling windows, both with and without an explicit materialized
 * store, plus null-argument validation. All tests feed the same fixture
 * (see {@link #processData()}): key "1" at t=10, 15 and 500, key "2" at
 * t=500, so window [0,500) holds two "1" records and window [500,1000)
 * holds one "1" and one "2" record.
 */
public class TimeWindowedKStreamImplTest {

    private static final String TOPIC = "input";
    private final StreamsBuilder builder = new StreamsBuilder();
    @Rule
    public final KStreamTestDriver driver = new KStreamTestDriver();
    private TimeWindowedKStream<String, String> windowedStream;

    @Before
    public void before() {
        // Group by key and window into 500ms tumbling windows.
        final KStream<String, String> stream = builder.stream(TOPIC, Consumed.with(Serdes.String(), Serdes.String()));
        windowedStream = stream.
                groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
                .windowedBy(TimeWindows.of(500L));
    }

    @Test
    public void shouldCountWindowed() {
        // Collect the emitted (window, count) pairs via a foreach side effect.
        final Map<Windowed<String>, Long> results = new HashMap<>();
        windowedStream.count()
                .toStream()
                .foreach(new ForeachAction<Windowed<String>, Long>() {
                    @Override
                    public void apply(final Windowed<String> key, final Long value) {
                        results.put(key, value);
                    }
                });

        processData();
        assertThat(results.get(new Windowed<>("1", new TimeWindow(0, 500))), equalTo(2L));
        assertThat(results.get(new Windowed<>("2", new TimeWindow(500, 1000))), equalTo(1L));
        assertThat(results.get(new Windowed<>("1", new TimeWindow(500, 1000))), equalTo(1L));
    }

    @Test
    public void shouldReduceWindowed() {
        final Map<Windowed<String>, String> results = new HashMap<>();
        windowedStream.reduce(MockReducer.STRING_ADDER)
                .toStream()
                .foreach(new ForeachAction<Windowed<String>, String>() {
                    @Override
                    public void apply(final Windowed<String> key, final String value) {
                        results.put(key, value);
                    }
                });

        processData();
        // STRING_ADDER concatenates values with '+'.
        assertThat(results.get(new Windowed<>("1", new TimeWindow(0, 500))), equalTo("1+2"));
        assertThat(results.get(new Windowed<>("2", new TimeWindow(500, 1000))), equalTo("1"));
        assertThat(results.get(new Windowed<>("1", new TimeWindow(500, 1000))), equalTo("3"));
    }

    @Test
    public void shouldAggregateWindowed() {
        final Map<Windowed<String>, String> results = new HashMap<>();
        windowedStream.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER
        )
                .toStream()
                .foreach(new ForeachAction<Windowed<String>, String>() {
                    @Override
                    public void apply(final Windowed<String> key, final String value) {
                        results.put(key, value);
                    }
                });
        processData();
        // STRING_INIT seeds each window with "0"; the aggregator appends "+value".
        assertThat(results.get(new Windowed<>("1", new TimeWindow(0, 500))), equalTo("0+1+2"));
        assertThat(results.get(new Windowed<>("2", new TimeWindow(500, 1000))), equalTo("0+1"));
        assertThat(results.get(new Windowed<>("1", new TimeWindow(500, 1000))), equalTo("0+3"));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void shouldMaterializeCount() {
        windowedStream.count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("count-store")
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.Long()));

        processData();
        // Verify the counts by reading the named store directly.
        final WindowStore<String, Long> windowStore = (WindowStore<String, Long>) driver.allStateStores().get("count-store");
        final List<KeyValue<Windowed<String>, Long>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000));
        assertThat(data, equalTo(Arrays.asList(
                KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), 2L),
                KeyValue.pair(new Windowed<>("1", new TimeWindow(500, 1000)), 1L),
                KeyValue.pair(new Windowed<>("2", new TimeWindow(500, 1000)), 1L))));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void shouldMaterializeReduced() {
        windowedStream.reduce(MockReducer.STRING_ADDER,
                              Materialized.<String, String, WindowStore<Bytes, byte[]>>as("reduced")
                                      .withKeySerde(Serdes.String())
                                      .withValueSerde(Serdes.String()));

        processData();
        final WindowStore<String, String> windowStore = (WindowStore<String, String>) driver.allStateStores().get("reduced");
        final List<KeyValue<Windowed<String>, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000));

        assertThat(data, equalTo(Arrays.asList(
                KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "1+2"),
                KeyValue.pair(new Windowed<>("1", new TimeWindow(500, 1000)), "3"),
                KeyValue.pair(new Windowed<>("2", new TimeWindow(500, 1000)), "1"))));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void shouldMaterializeAggregated() {
        windowedStream.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER,
                                 Materialized.<String, String, WindowStore<Bytes, byte[]>>as("aggregated")
                                         .withKeySerde(Serdes.String())
                                         .withValueSerde(Serdes.String()));

        processData();
        final WindowStore<String, String> windowStore = (WindowStore<String, String>) driver.allStateStores().get("aggregated");
        final List<KeyValue<Windowed<String>, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000));

        assertThat(data, equalTo(Arrays.asList(
                KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "0+1+2"),
                KeyValue.pair(new Windowed<>("1", new TimeWindow(500, 1000)), "0+3"),
                KeyValue.pair(new Windowed<>("2", new TimeWindow(500, 1000)), "0+1"))));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnAggregateIfInitializerIsNull() {
        windowedStream.aggregate(null, MockAggregator.TOSTRING_ADDER);
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnAggregateIfAggregatorIsNull() {
        windowedStream.aggregate(MockInitializer.STRING_INIT, null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnReduceIfReducerIsNull() {
        windowedStream.reduce(null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnMaterializedAggregateIfInitializerIsNull() {
        windowedStream.aggregate(null, MockAggregator.TOSTRING_ADDER,
                                 Materialized.<String, String, WindowStore<Bytes, byte[]>>as("store"));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnMaterializedAggregateIfAggregatorIsNull() {
        windowedStream.aggregate(MockInitializer.STRING_INIT, null,
                                 Materialized.<String, String, WindowStore<Bytes, byte[]>>as("store"));
    }

    @SuppressWarnings("unchecked")
    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnMaterializedAggregateIfMaterializedIsNull() {
        windowedStream.aggregate(MockInitializer.STRING_INIT,
                                 MockAggregator.TOSTRING_ADDER,
                                 (Materialized) null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnMaterializedReduceIfReducerIsNull() {
        windowedStream.reduce(null,
                              Materialized.<String, String, WindowStore<Bytes, byte[]>>as("store"));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnMaterializedReduceIfMaterializedIsNull() {
        windowedStream.reduce(MockReducer.STRING_ADDER,
                              null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerOnCountIfMaterializedIsNull() {
        windowedStream.count(null);
    }

    // Drives the shared fixture through the topology: "1" at t=10, 15, 500
    // and "2" at t=500, then flushes so all windows are emitted.
    private void processData() {
        driver.setUp(builder, TestUtils.tempDirectory(), 0);
        driver.setTime(10);
        driver.process(TOPIC, "1", "1");
        driver.setTime(15);
        driver.process(TOPIC, "1", "2");
        driver.setTime(500);
        driver.process(TOPIC, "1", "3");
        driver.process(TOPIC, "2", "1");
        driver.flushState();
    }

}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the IAM ListAttachedRolePolicies operation.
 * Generated model class: simple bean with fluent {@code withX} variants.
 */
public class ListAttachedRolePoliciesRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /**
     * <p>
     * The name (friendly name, not ARN) of the role to list attached policies
     * for.
     * </p>
     */
    private String roleName;
    /**
     * <p>
     * The path prefix for filtering the results. This parameter is optional. If
     * it is not included, it defaults to a slash (/), listing all policies.
     * </p>
     */
    private String pathPrefix;
    /**
     * <p>
     * Pagination marker: set to the <code>Marker</code> value from a previous
     * truncated response to continue listing from where it stopped.
     * </p>
     */
    private String marker;
    /**
     * <p>
     * Maximum number of items to return per page. Optional; defaults to 100.
     * IAM may return fewer results even when more are available, in which case
     * <code>IsTruncated</code> is <code>true</code> and <code>Marker</code>
     * holds the continuation token.
     * </p>
     */
    private Integer maxItems;

    /**
     * <p>
     * The name (friendly name, not ARN) of the role to list attached policies
     * for.
     * </p>
     *
     * @param roleName
     *        The name (friendly name, not ARN) of the role to list attached
     *        policies for.
     */
    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    /**
     * <p>
     * The name (friendly name, not ARN) of the role to list attached policies
     * for.
     * </p>
     *
     * @return The name (friendly name, not ARN) of the role to list attached
     *         policies for.
     */
    public String getRoleName() {
        return this.roleName;
    }

    /**
     * <p>
     * The name (friendly name, not ARN) of the role to list attached policies
     * for.
     * </p>
     *
     * @param roleName
     *        The name (friendly name, not ARN) of the role to list attached
     *        policies for.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListAttachedRolePoliciesRequest withRoleName(String roleName) {
        setRoleName(roleName);
        return this;
    }

    /**
     * <p>
     * The path prefix for filtering the results. Optional; defaults to a slash
     * (/), listing all policies.
     * </p>
     *
     * @param pathPrefix
     *        The path prefix for filtering the results.
     */
    public void setPathPrefix(String pathPrefix) {
        this.pathPrefix = pathPrefix;
    }

    /**
     * <p>
     * The path prefix for filtering the results. Optional; defaults to a slash
     * (/), listing all policies.
     * </p>
     *
     * @return The path prefix for filtering the results.
     */
    public String getPathPrefix() {
        return this.pathPrefix;
    }

    /**
     * <p>
     * The path prefix for filtering the results. Optional; defaults to a slash
     * (/), listing all policies.
     * </p>
     *
     * @param pathPrefix
     *        The path prefix for filtering the results.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListAttachedRolePoliciesRequest withPathPrefix(String pathPrefix) {
        setPathPrefix(pathPrefix);
        return this;
    }

    /**
     * <p>
     * Pagination marker: use only after a truncated response, set to that
     * response's <code>Marker</code> value.
     * </p>
     *
     * @param marker
     *        The <code>Marker</code> value from the previous truncated
     *        response.
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * <p>
     * Pagination marker: use only after a truncated response, set to that
     * response's <code>Marker</code> value.
     * </p>
     *
     * @return The <code>Marker</code> value from the previous truncated
     *         response.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * <p>
     * Pagination marker: use only after a truncated response, set to that
     * response's <code>Marker</code> value.
     * </p>
     *
     * @param marker
     *        The <code>Marker</code> value from the previous truncated
     *        response.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListAttachedRolePoliciesRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * <p>
     * Maximum number of items to return per page. Optional; defaults to 100.
     * IAM may return fewer results even when more are available, in which case
     * <code>IsTruncated</code> is <code>true</code> and <code>Marker</code>
     * holds the continuation token.
     * </p>
     *
     * @param maxItems
     *        The maximum number of items you want in the response.
     */
    public void setMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * <p>
     * Maximum number of items to return per page. Optional; defaults to 100.
     * IAM may return fewer results even when more are available, in which case
     * <code>IsTruncated</code> is <code>true</code> and <code>Marker</code>
     * holds the continuation token.
     * </p>
     *
     * @return The maximum number of items you want in the response.
     */
    public Integer getMaxItems() {
        return this.maxItems;
    }

    /**
     * <p>
     * Use this only when paginating results to indicate the maximum number of
     * items you want in the response. If additional items exist beyond the
     * maximum you specify, the <code>IsTruncated</code> response element is
     * <code>true</code>.
     * </p>
     * <p>
     * This parameter is optional. If you do not include it, it defaults to 100.
* Note that IAM might return fewer results, even when there are more * results available. In that case, the <code>IsTruncated</code> response * element returns <code>true</code> and <code>Marker</code> contains a * value to include in the subsequent call that tells the service where to * continue from. * </p> * * @param maxItems * Use this only when paginating results to indicate the maximum * number of items you want in the response. If additional items * exist beyond the maximum you specify, the <code>IsTruncated</code> * response element is <code>true</code>.</p> * <p> * This parameter is optional. If you do not include it, it defaults * to 100. Note that IAM might return fewer results, even when there * are more results available. In that case, the * <code>IsTruncated</code> response element returns * <code>true</code> and <code>Marker</code> contains a value to * include in the subsequent call that tells the service where to * continue from. * @return Returns a reference to this object so that method calls can be * chained together. */ public ListAttachedRolePoliciesRequest withMaxItems(Integer maxItems) { setMaxItems(maxItems); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getRoleName() != null) sb.append("RoleName: " + getRoleName() + ","); if (getPathPrefix() != null) sb.append("PathPrefix: " + getPathPrefix() + ","); if (getMarker() != null) sb.append("Marker: " + getMarker() + ","); if (getMaxItems() != null) sb.append("MaxItems: " + getMaxItems()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListAttachedRolePoliciesRequest == false) return false; ListAttachedRolePoliciesRequest other = (ListAttachedRolePoliciesRequest) obj; if (other.getRoleName() == null ^ this.getRoleName() == null) return false; if (other.getRoleName() != null && other.getRoleName().equals(this.getRoleName()) == false) return false; if (other.getPathPrefix() == null ^ this.getPathPrefix() == null) return false; if (other.getPathPrefix() != null && other.getPathPrefix().equals(this.getPathPrefix()) == false) return false; if (other.getMarker() == null ^ this.getMarker() == null) return false; if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false; if (other.getMaxItems() == null ^ this.getMaxItems() == null) return false; if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getRoleName() == null) ? 0 : getRoleName().hashCode()); hashCode = prime * hashCode + ((getPathPrefix() == null) ? 0 : getPathPrefix().hashCode()); hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode()); hashCode = prime * hashCode + ((getMaxItems() == null) ? 
0 : getMaxItems().hashCode()); return hashCode; } @Override public ListAttachedRolePoliciesRequest clone() { return (ListAttachedRolePoliciesRequest) super.clone(); } }
/**************************************************************** * Licensed to the AOS Community (AOS) under one or more * * contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The AOS licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ /* * @(#)ElementTreePanel.java 1.19 10/03/23 * * Copyright (c) 2006, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * -Redistribution of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * -Redistribution in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of Oracle or the names of contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * This software is provided "AS IS," without a warranty of any kind. ALL * EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING * ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. 
SUN MICROSYSTEMS, INC. ("SUN")
 * AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
 * AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
 * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
 * REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
 * INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
 * OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
 * EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
 *
 * You acknowledge that this software is not designed, licensed or intended
 * for use in the design, construction, operation or maintenance of any
 * nuclear facility.
 */

/*
 * @(#)ElementTreePanel.java 1.19 10/03/23
 */

import javax.swing.*;
import javax.swing.event.*;
import javax.swing.text.*;
import javax.swing.tree.*;
import javax.swing.undo.*;
import java.awt.*;
import java.beans.*;
import java.util.*;

/**
 * Displays a tree showing all the elements in a text Document. Selecting
 * a node will result in resetting the selection of the JTextComponent.
 * This also becomes a CaretListener to know when the selection has changed
 * in the text to update the selected item in the tree.
 *
 * @author Scott Violet
 * @version 1.19 03/23/10
 */
public class ElementTreePanel extends JPanel implements CaretListener, DocumentListener, PropertyChangeListener, TreeSelectionListener {

    /** Tree showing the document's element structure. */
    protected JTree tree;

    /** Text component the elements are shown for. */
    protected JTextComponent editor;

    /** Model for the tree. */
    protected ElementTreeModel treeModel;

    /**
     * Set to true while this panel is programmatically updating either the
     * tree selection or the editor selection; guards against the
     * caretUpdate/valueChanged listeners re-entering each other.
     */
    protected boolean updatingSelection;

    /**
     * Builds the panel: a tree mirroring the element structure of
     * {@code editor}'s Document, with this panel registered as document,
     * property-change, caret, and tree-selection listener.
     */
    public ElementTreePanel(JTextComponent editor) {
        this.editor = editor;

        Document document = editor.getDocument();

        // Create the tree.
        treeModel = new ElementTreeModel(document);
        tree = new JTree(treeModel) {
            public String convertValueToText(Object value, boolean selected,
                    boolean expanded, boolean leaf, int row,
                    boolean hasFocus) {
                // Should only happen for the root
                if (!(value instanceof Element))
                    return value.toString();

                Element e = (Element) value;
                AttributeSet as = e.getAttributes().copyAttributes();
                String asString;

                if (as != null) {
                    StringBuffer retBuffer = new StringBuffer("[");
                    Enumeration names = as.getAttributeNames();

                    while (names.hasMoreElements()) {
                        Object nextName = names.nextElement();

                        if (nextName != StyleConstants.ResolveAttribute) {
                            retBuffer.append(" ");
                            retBuffer.append(nextName);
                            retBuffer.append("=");
                            retBuffer.append(as.getAttribute(nextName));
                        }
                    }
                    retBuffer.append(" ]");
                    asString = retBuffer.toString();
                } else
                    asString = "[ ]";

                // NOTE(review): both branches below build the identical
                // string; the leaf/non-leaf split looks vestigial but is
                // kept as-is.
                if (e.isLeaf())
                    return e.getName() + " [" + e.getStartOffset() + ", "
                            + e.getEndOffset() + "] Attributes: " + asString;
                return e.getName() + " [" + e.getStartOffset() + ", "
                        + e.getEndOffset() + "] Attributes: " + asString;
            }
        };
        tree.addTreeSelectionListener(this);
        tree.setDragEnabled(true);
        // Don't show the root, it is fake.
        tree.setRootVisible(false);
        // Since the display value of every node after the insertion point
        // changes every time the text changes and we don't generate a change
        // event for all those nodes the display value can become off.
        // This can be seen as '...' instead of the complete string value.
        // This is a temporary workaround, increase the needed size by 15,
        // hoping that will be enough.
        tree.setCellRenderer(new DefaultTreeCellRenderer() {
            public Dimension getPreferredSize() {
                Dimension retValue = super.getPreferredSize();
                if (retValue != null)
                    retValue.width += 15;
                return retValue;
            }
        });
        // become a listener on the document to update the tree.
        document.addDocumentListener(this);

        // become a PropertyChangeListener to know when the Document has
        // changed.
        editor.addPropertyChangeListener(this);

        // Become a CaretListener
        editor.addCaretListener(this);

        // configure the panel and frame containing it.
        setLayout(new BorderLayout());
        add(new JScrollPane(tree), BorderLayout.CENTER);

        // Add a label above tree to describe what is being shown
        JLabel label = new JLabel("Elements that make up the current document",
                SwingConstants.CENTER);

        label.setFont(new Font("Dialog", Font.BOLD, 14));
        add(label, BorderLayout.NORTH);

        setPreferredSize(new Dimension(400, 400));
    }

    /**
     * Resets the JTextComponent to <code>editor</code>. This will update
     * the tree accordingly. All listeners are detached from the previous
     * editor/document and attached to the new ones; passing null clears
     * the tree.
     */
    public void setEditor(JTextComponent editor) {
        if (this.editor == editor) {
            return;
        }

        if (this.editor != null) {
            Document oldDoc = this.editor.getDocument();

            oldDoc.removeDocumentListener(this);
            this.editor.removePropertyChangeListener(this);
            this.editor.removeCaretListener(this);
        }
        this.editor = editor;
        if (editor == null) {
            treeModel = null;
            // NOTE(review): JTree.setModel(null) leaves the tree without a
            // model; subsequent listener callbacks assume a non-null model,
            // so they must not fire while editor is null — confirm.
            tree.setModel(null);
        } else {
            Document newDoc = editor.getDocument();

            newDoc.addDocumentListener(this);
            editor.addPropertyChangeListener(this);
            editor.addCaretListener(this);
            treeModel = new ElementTreeModel(newDoc);
            tree.setModel(treeModel);
        }
    }

    // PropertyChangeListener

    /**
     * Invoked when a property changes. We are only interested in when the
     * Document changes to reset the DocumentListener.
     */
    public void propertyChange(PropertyChangeEvent e) {
        if (e.getSource() == getEditor()
                && e.getPropertyName().equals("document")) {
            JTextComponent editor = getEditor(); // NOTE(review): unused local.
            Document oldDoc = (Document) e.getOldValue();
            Document newDoc = (Document) e.getNewValue();

            // Reset the DocumentListener
            oldDoc.removeDocumentListener(this);
            newDoc.addDocumentListener(this);

            // Recreate the TreeModel.
            treeModel = new ElementTreeModel(newDoc);
            tree.setModel(treeModel);
        }
    }

    // DocumentListener

    /**
     * Gives notification that there was an insert into the document. The
     * given range bounds the freshly inserted region.
     *
     * @param e the document event
     */
    public void insertUpdate(DocumentEvent e) {
        updateTree(e);
    }

    /**
     * Gives notification that a portion of the document has been
     * removed. The range is given in terms of what the view last
     * saw (that is, before updating sticky positions).
     *
     * @param e the document event
     */
    public void removeUpdate(DocumentEvent e) {
        updateTree(e);
    }

    /**
     * Gives notification that an attribute or set of attributes changed.
     *
     * @param e the document event
     */
    public void changedUpdate(DocumentEvent e) {
        updateTree(e);
    }

    // CaretListener

    /**
     * Messaged when the selection in the editor has changed. Will update
     * the selection in the tree. Builds a TreePath for every character
     * element overlapping the editor selection and selects them all,
     * guarded by updatingSelection to avoid feedback into valueChanged.
     */
    public void caretUpdate(CaretEvent e) {
        if (!updatingSelection) {
            JTextComponent editor = getEditor(); // NOTE(review): unused local.
            int selBegin = Math.min(e.getDot(), e.getMark());
            int end = Math.max(e.getDot(), e.getMark());
            Vector paths = new Vector();
            TreeModel model = getTreeModel();
            Object root = model.getRoot();
            int rootCount = model.getChildCount(root);

            // Build an array of all the paths to all the character elements
            // in the selection.
            for (int counter = 0; counter < rootCount; counter++) {
                int start = selBegin;

                while (start <= end) {
                    TreePath path = getPathForIndex(start, root,
                            (Element) model.getChild(root, counter));
                    Element charElement = (Element) path.
                            getLastPathComponent();

                    paths.addElement(path);
                    // Advance past the element just visited; the extra
                    // start++ guards against elements that don't advance
                    // the offset (would otherwise loop forever).
                    if (start >= charElement.getEndOffset())
                        start++;
                    else
                        start = charElement.getEndOffset();
                }
            }

            // If a path was found, select it (them).
            int numPaths = paths.size();

            if (numPaths > 0) {
                TreePath[] pathArray = new TreePath[numPaths];

                paths.copyInto(pathArray);
                updatingSelection = true;
                try {
                    getTree().setSelectionPaths(pathArray);
                    getTree().scrollPathToVisible(pathArray[0]);
                } finally {
                    updatingSelection = false;
                }
            }
        }
    }

    // TreeSelectionListener

    /**
     * Called whenever the value of the selection changes.
     * Mirrors a single selected tree node back into the editor selection,
     * guarded by updatingSelection to avoid feedback into caretUpdate.
     *
     * @param e the event that characterizes the change.
     */
    public void valueChanged(TreeSelectionEvent e) {
        JTree tree = getTree();

        if (!updatingSelection && tree.getSelectionCount() == 1) {
            TreePath selPath = tree.getSelectionPath();
            Object lastPathComponent = selPath.getLastPathComponent();

            // Only the fake root is a DefaultMutableTreeNode; every other
            // node in this model is an Element.
            if (!(lastPathComponent instanceof DefaultMutableTreeNode)) {
                Element selElement = (Element) lastPathComponent;

                updatingSelection = true;
                try {
                    getEditor().select(selElement.getStartOffset(),
                            selElement.getEndOffset());
                } finally {
                    updatingSelection = false;
                }
            }
        }
    }

    // Local methods

    /**
     * @return tree showing elements.
     */
    protected JTree getTree() {
        return tree;
    }

    /**
     * @return JTextComponent showing elements for.
     */
    protected JTextComponent getEditor() {
        return editor;
    }

    /**
     * @return TreeModel implementation used to represent the elements.
     */
    public DefaultTreeModel getTreeModel() {
        return treeModel;
    }

    /**
     * Updates the tree based on the event type. This will invoke either
     * updateTree with the root element, or handleChange.
     */
    protected void updateTree(DocumentEvent event) {
        updatingSelection = true;
        try {
            TreeModel model = getTreeModel();
            Object root = model.getRoot();

            // Walk children in reverse so index-based notifications stay
            // valid while siblings are processed.
            for (int counter = model.getChildCount(root) - 1; counter >= 0;
                    counter--) {
                updateTree(event, (Element) model.getChild(root, counter));
            }
        } finally {
            updatingSelection = false;
        }
    }

    /**
     * Creates TreeModelEvents based on the DocumentEvent and messages
     * the treemodel. This recursively invokes this method with children
     * elements.
     * @param event indicates what elements in the tree hierarchy have
     * changed.
     * @param element Current element to check for changes against.
     */
    protected void updateTree(DocumentEvent event, Element element) {
        DocumentEvent.ElementChange ec = event.getChange(element);

        if (ec != null) {
            Element[] removed = ec.getChildrenRemoved();
            Element[] added = ec.getChildrenAdded();
            int startIndex = ec.getIndex();

            // Check for removed.
            if (removed != null && removed.length > 0) {
                int[] indices = new int[removed.length];

                for (int counter = 0; counter < removed.length; counter++) {
                    indices[counter] = startIndex + counter;
                }
                getTreeModel().nodesWereRemoved((TreeNode) element, indices,
                        removed);
            }
            // check for added
            if (added != null && added.length > 0) {
                int[] indices = new int[added.length];

                for (int counter = 0; counter < added.length; counter++) {
                    indices[counter] = startIndex + counter;
                }
                getTreeModel().nodesWereInserted((TreeNode) element, indices);
            }
        }
        if (!element.isLeaf()) {
            int startIndex = element.getElementIndex(event.getOffset());
            int elementCount = element.getElementCount();
            int endIndex = Math.min(elementCount - 1,
                    element.getElementIndex(event.getOffset()
                            + event.getLength()));

            if (startIndex > 0 && startIndex < elementCount
                    && element.getElement(startIndex).getStartOffset()
                            == event.getOffset()) {
                // Force checking the previous element.
                startIndex--;
            }
            if (startIndex != -1 && endIndex != -1) {
                for (int counter = startIndex; counter <= endIndex;
                        counter++) {
                    updateTree(event, element.getElement(counter));
                }
            }
        } else {
            // Element is a leaf, assume it changed
            getTreeModel().nodeChanged((TreeNode) element);
        }
    }

    /**
     * Returns a TreePath to the element at <code>position</code>, built by
     * descending from <code>rootElement</code> through the child element
     * containing the position at each level.
     */
    protected TreePath getPathForIndex(int position, Object root,
            Element rootElement) {
        TreePath path = new TreePath(root);
        Element child = rootElement.getElement(
                rootElement.getElementIndex(position));

        path = path.pathByAddingChild(rootElement);
        path = path.pathByAddingChild(child);
        while (!child.isLeaf()) {
            child = child.getElement(child.getElementIndex(position));
            path = path.pathByAddingChild(child);
        }
        return path;
    }

    /**
     * ElementTreeModel is an implementation of TreeModel to handle displaying
     * the Elements from a Document. AbstractDocument.AbstractElement is
     * the default implementation used by the swing text package to implement
     * Element, and it implements TreeNode. This makes it trivial to create
     * a DefaultTreeModel rooted at a particular Element from the Document.
     * Unfortunately each Document can have more than one root Element.
     * Implying that to display all the root elements as a child of another
     * root a fake node has be created. This class creates a fake node as
     * the root with the children being the root elements of the Document
     * (getRootElements).
     * <p>This subclasses DefaultTreeModel. The majority of the TreeModel
     * methods have been subclassed, primarily to special case the root.
     */
    public static class ElementTreeModel extends DefaultTreeModel {

        /** Root elements of the Document, acting as children of the fake root. */
        protected Element[] rootElements;

        public ElementTreeModel(Document document) {
            super(new DefaultMutableTreeNode("root"), false);
            rootElements = document.getRootElements();
        }

        /**
         * Returns the child of <I>parent</I> at index <I>index</I> in
         * the parent's child array. <I>parent</I> must be a node
         * previously obtained from this data source. This should
         * not return null if <i>index</i> is a valid index for
         * <i>parent</i> (that is <i>index</i> >= 0 && <i>index</i>
         * < getChildCount(<i>parent</i>)).
         *
         * @param parent a node in the tree, obtained from this data source
         * @return the child of <I>parent</I> at index <I>index</I>
         */
        public Object getChild(Object parent, int index) {
            if (parent == root)
                return rootElements[index];
            return super.getChild(parent, index);
        }

        /**
         * Returns the number of children of <I>parent</I>. Returns 0
         * if the node is a leaf or if it has no children.
         * <I>parent</I> must be a node previously obtained from this
         * data source.
         *
         * @param parent a node in the tree, obtained from this data source
         * @return the number of children of the node <I>parent</I>
         */
        public int getChildCount(Object parent) {
            if (parent == root)
                return rootElements.length;
            return super.getChildCount(parent);
        }

        /**
         * Returns true if <I>node</I> is a leaf. It is possible for
         * this method to return false even if <I>node</I> has no
         * children. A directory in a filesystem, for example, may
         * contain no files; the node representing the directory is
         * not a leaf, but it also has no children.
         *
         * @param node a node in the tree, obtained from this data source
         * @return true if <I>node</I> is a leaf
         */
        public boolean isLeaf(Object node) {
            if (node == root)
                return false;
            return super.isLeaf(node);
        }

        /**
         * Returns the index of child in parent. Linear scan over the fake
         * root's children; delegates to the superclass otherwise.
         */
        public int getIndexOfChild(Object parent, Object child) {
            if (parent == root) {
                for (int counter = rootElements.length - 1; counter >= 0;
                        counter--) {
                    if (rootElements[counter] == child)
                        return counter;
                }
                return -1;
            }
            return super.getIndexOfChild(parent, child);
        }

        /**
         * Invoke this method after you've changed how node is to be
         * represented in the tree. Overridden to treat nodes with a null
         * parent (the Document root elements) as children of the fake root.
         */
        public void nodeChanged(TreeNode node) {
            if (listenerList != null && node != null) {
                TreeNode parent = node.getParent();

                if (parent == null && node != root) {
                    parent = root;
                }
                if (parent != null) {
                    int anIndex = getIndexOfChild(parent, node);

                    if (anIndex != -1) {
                        int[] cIndexs = new int[1];

                        cIndexs[0] = anIndex;
                        nodesChanged(parent, cIndexs);
                    }
                }
            }
        }

        /**
         * Returns the path to a particluar node. This is recursive.
         */
        protected TreeNode[] getPathToRoot(TreeNode aNode, int depth) {
            TreeNode[] retNodes;

            /* Check for null, in case someone passed in a null node, or
               they passed in an element that isn't rooted at root. */
            if (aNode == null) {
                if (depth == 0)
                    return null;
                else
                    retNodes = new TreeNode[depth];
            } else {
                depth++;
                if (aNode == root)
                    retNodes = new TreeNode[depth];
                else {
                    TreeNode parent = aNode.getParent();

                    if (parent == null)
                        parent = root;
                    retNodes = getPathToRoot(parent, depth);
                }
                retNodes[retNodes.length - depth] = aNode;
            }
            return retNodes;
        }
    }
}
/* Copyright (C) 2013-2014 Computer Sciences Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.9.1)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
// Local deviations from the 0.9.1 generator output (see inline notes):
//  - hashCode() is value-based, consistent with equals();
//  - the unused 'optionals' descriptor array is static final.
package com.cloudera.impala.extdatasource.thrift;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Thrift struct describing a single external-data-source column:
 * an optional name plus an optional Impala column type.
 */
public class TColumnDesc implements org.apache.thrift.TBase<TColumnDesc, TColumnDesc._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnDesc> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnDesc");

  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRUCT, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TColumnDescStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TColumnDescTupleSchemeFactory());
  }

  public String name; // optional
  public com.cloudera.impala.thrift.TColumnType type; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    NAME((short)1, "name"),
    TYPE((short)2, "type");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // NAME
          return NAME;
        case 2: // TYPE
          return TYPE;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // Fix: declared static final — this descriptor array is immutable and
  // never written; the generated version allocated one copy per instance.
  private static final _Fields optionals[] = {_Fields.NAME,_Fields.TYPE};
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, com.cloudera.impala.thrift.TColumnType.class)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TColumnDesc.class, metaDataMap);
  }

  public TColumnDesc() {
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public TColumnDesc(TColumnDesc other) {
    if (other.isSetName()) {
      this.name = other.name;
    }
    if (other.isSetType()) {
      this.type = new com.cloudera.impala.thrift.TColumnType(other.type);
    }
  }

  public TColumnDesc deepCopy() {
    return new TColumnDesc(this);
  }

  @Override
  public void clear() {
    this.name = null;
    this.type = null;
  }

  public String getName() {
    return this.name;
  }

  public TColumnDesc setName(String name) {
    this.name = name;
    return this;
  }

  public void unsetName() {
    this.name = null;
  }

  /** Returns true if field name is set (has been assigned a value) and false otherwise */
  public boolean isSetName() {
    return this.name != null;
  }

  public void setNameIsSet(boolean value) {
    if (!value) {
      this.name = null;
    }
  }

  public com.cloudera.impala.thrift.TColumnType getType() {
    return this.type;
  }

  public TColumnDesc setType(com.cloudera.impala.thrift.TColumnType type) {
    this.type = type;
    return this;
  }

  public void unsetType() {
    this.type = null;
  }

  /** Returns true if field type is set (has been assigned a value) and false otherwise */
  public boolean isSetType() {
    return this.type != null;
  }

  public void setTypeIsSet(boolean value) {
    if (!value) {
      this.type = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case NAME:
      if (value == null) {
        unsetName();
      } else {
        setName((String)value);
      }
      break;

    case TYPE:
      if (value == null) {
        unsetType();
      } else {
        setType((com.cloudera.impala.thrift.TColumnType)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case NAME:
      return getName();

    case TYPE:
      return getType();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case NAME:
      return isSetName();
    case TYPE:
      return isSetType();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof TColumnDesc)
      return this.equals((TColumnDesc)that);
    return false;
  }

  public boolean equals(TColumnDesc that) {
    if (that == null)
      return false;

    boolean this_present_name = true && this.isSetName();
    boolean that_present_name = true && that.isSetName();
    if (this_present_name || that_present_name) {
      if (!(this_present_name && that_present_name))
        return false;
      if (!this.name.equals(that.name))
        return false;
    }

    boolean this_present_type = true && this.isSetType();
    boolean that_present_type = true && that.isSetType();
    if (this_present_type || that_present_type) {
      if (!(this_present_type && that_present_type))
        return false;
      if (!this.type.equals(that.type))
        return false;
    }

    return true;
  }

  /**
   * Fix: the 0.9.1 generator emitted {@code return 0;}, which satisfies the
   * hashCode contract but degrades every HashMap/HashSet holding
   * TColumnDesc to a single bucket. This is the value-based algorithm
   * later Thrift generators emit; it is consistent with
   * {@link #equals(TColumnDesc)} (equal structs hash equally).
   */
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
    if (isSetName())
      hashCode = hashCode * 8191 + name.hashCode();

    hashCode = hashCode * 8191 + ((isSetType()) ? 131071 : 524287);
    if (isSetType())
      hashCode = hashCode * 8191 + type.hashCode();

    return hashCode;
  }

  @Override
  public int compareTo(TColumnDesc other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetName()).compareTo(other.isSetName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetType()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("TColumnDesc(");
    boolean first = true;

    if (isSetName()) {
      sb.append("name:");
      if (this.name == null) {
        sb.append("null");
      } else {
        sb.append(this.name);
      }
      first = false;
    }
    if (isSetType()) {
      if (!first) sb.append(", ");
      sb.append("type:");
      if (this.type == null) {
        sb.append("null");
      } else {
        sb.append(this.type);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
    if (type != null) {
      type.validate();
    }
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class TColumnDescStandardSchemeFactory implements SchemeFactory {
    public TColumnDescStandardScheme getScheme() {
      return new TColumnDescStandardScheme();
    }
  }

  private static class TColumnDescStandardScheme extends StandardScheme<TColumnDesc> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, TColumnDesc struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.name = iprot.readString();
              struct.setNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // TYPE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.type = new com.cloudera.impala.thrift.TColumnType();
              struct.type.read(iprot);
              struct.setTypeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, TColumnDesc struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.name != null) {
        if (struct.isSetName()) {
          oprot.writeFieldBegin(NAME_FIELD_DESC);
          oprot.writeString(struct.name);
          oprot.writeFieldEnd();
        }
      }
      if (struct.type != null) {
        if (struct.isSetType()) {
          oprot.writeFieldBegin(TYPE_FIELD_DESC);
          struct.type.write(oprot);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class TColumnDescTupleSchemeFactory implements SchemeFactory {
    public TColumnDescTupleScheme getScheme() {
      return new TColumnDescTupleScheme();
    }
  }

  private static class TColumnDescTupleScheme extends TupleScheme<TColumnDesc> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, TColumnDesc struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetName()) {
        optionals.set(0);
      }
      if (struct.isSetType()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetName()) {
        oprot.writeString(struct.name);
      }
      if (struct.isSetType()) {
        struct.type.write(oprot);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, TColumnDesc struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.name = iprot.readString();
        struct.setNameIsSet(true);
      }
      if (incoming.get(1)) {
        struct.type = new com.cloudera.impala.thrift.TColumnType();
        struct.type.read(iprot);
        struct.setTypeIsSet(true);
      }
    }
  }

}
package org.kane.base.immutability.collections;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.kane.base.immutability.ImmutableException;
import org.kane.base.immutability.StandardImmutableObject;
import org.kane.base.serialization.JavaCodeUtils;
import org.kane.base.serialization.StandardObject;

import com.thoughtworks.xstream.annotations.XStreamAlias;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Tests that a FieldHashSet embedded in a StandardImmutableObject is fully
 * mutable before complete() is called and throws ImmutableException on every
 * mutating operation (including via previously obtained iterators) afterwards.
 */
public class StandardImmutableFieldHashSetTest extends TestCase
{
    @XStreamAlias("DummyObject-set")
    static private class DummyObject extends StandardImmutableObject
    {
        private FieldHashSet<String> set;

        // Iterator captured while the set was still mutable; used to verify
        // that even pre-freeze iterators reject remove() after complete().
        transient private Iterator old_iterator;

        public int compareTo(Object o) { return 0; }
        public void normalize() {}
        public void validate() {}
        public void freeze() { set.freeze(); }
        public int hashCode() { return set.hashCode(); }
        public FieldHashSet getSimpleArrayList() { return set; }

        // Exercises the full mutable -> complete -> immutable lifecycle.
        public DummyObject()
        {
            set = new FieldHashSet();
            old_iterator = set.iterator();

            verifyMutable();
            set.add("foo");

            complete();

            verifyImmutable();
            verifyOldIteratorImmutable();
        }

        // Builder-side constructor: leaves the object incomplete (mutable).
        public DummyObject(Builder b)
        {
            set = new FieldHashSet();
        }

        public boolean equals(Object obj)
        {
            if (!(obj instanceof DummyObject) ) return false;

            DummyObject other = (DummyObject)obj;

            if ( !getSimpleArrayList().equals(other.getSimpleArrayList()) ) return false;

            return true;
        }

        // Checks every mutating Set operation succeeds pre-complete().
        public void verifyMutable()
        {
            try
            {
                set.add("foo");
                set.add("bar");
                set.add("baz");
                set.add("quz");
                set.add("quuz");

                assertEquals(set.size(),5);

                set.remove("quz");
                set.remove("quuz");

                assertEquals(set.size(),3);

                Set<String> quz_values = new HashSet();
                quz_values.add("quz");
                quz_values.add("quuz");

                set.addAll(quz_values);
                assertEquals(set.size(),5);

                set.removeAll(quz_values);
                assertEquals(set.size(),3);

                set.addAll(quz_values);
                set.retainAll(quz_values);
                assertEquals(set,quz_values);

                // remove(1) is a no-op on a Set<String> (Integer is never a
                // member) but must not throw while mutable.
                set.remove(1);
                set.add("blaz");

                Iterator<String> itr = set.iterator();
                itr.next();
                itr.remove(); // Verify that items can be removed using an iterator...

                set.clear();
                assertEquals(set.size(),0);
            }
            catch(Exception e)
            {
                e.printStackTrace();
                assert(false);
            }
        }

        // Checks every mutating Set operation throws post-complete().
        public void verifyImmutable()
        {
            assert(!set.isEmpty()); // these tests require list contains at least one element...

            try { set.add("foo"); assert(false); } catch(ImmutableException e) { }
            try { set.remove("foo"); assert(false); } catch(ImmutableException e) { }

            Set<String> quz_values = new HashSet();
            quz_values.add("quz");
            quz_values.add("quuz");

            try { set.addAll(quz_values); assert(false); } catch(ImmutableException e) { }
            try { set.removeAll(quz_values); assert(false); } catch(ImmutableException e) { }
            try { set.retainAll(quz_values); assert(false); } catch(ImmutableException e) { }
            try { set.remove(1); assert(false); } catch(ImmutableException e) { }

            try
            {
                Iterator<String> itr = set.iterator();
                itr.next();
                itr.remove(); // Verify that items can be removed using an iterator...
                assert(false);
            }
            catch(ImmutableException e) {}

            try { set.clear(); assert(false); } catch(ImmutableException e) { }
        }

        // An iterator created before freeze must also refuse to mutate.
        public void verifyOldIteratorImmutable()
        {
            try { old_iterator.remove(); assert(false); } catch(ImmutableException e) { }
        }

        static public class Builder
        {
            private DummyObject obj;

            public Builder()
            {
                obj = new DummyObject(this);
            }

            public void add(String el)
            {
                obj.assertNotComplete();
                obj.set.add(el);
            }

            public DummyObject create()
            {
                obj.assertNotComplete();
                obj.complete();
                return obj;
            }
        }
    }

    /**
     * Create the test case
     *
     * @param testName name of the test case
     */
    public StandardImmutableFieldHashSetTest( String testName )
    {
        super( testName );
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite()
    {
        return new TestSuite( StandardImmutableFieldHashSetTest.class );
    }

    // The DummyObject constructor runs the whole lifecycle check itself.
    public void testCopyConstructor()
    {
        new DummyObject();
    }

    public void testBuidler()
    {
        DummyObject.Builder builder = new DummyObject.Builder();

        builder.obj.verifyMutable();

        builder.add("foo");

        DummyObject obj = builder.create();
        obj.verifyImmutable();

        // used to create the code for test serialization...
        System.out.println(obj.toJavaCode("obj"));
    }

    // Round-trips a one-element set through XStream XML and re-verifies
    // that the deserialized object is immutable.
    public void testSerialization()
    {
        String obj_as_xml_string = String.format("%s\r\n%s\r\n%s\r\n%s\r\n%s\r\n%s\r\n%s\r\n%s\r\n"
            , "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>"
            , "<DummyObject-set>"
            , " <set>"
            , " <contents class=\"set\">"
            , " <string>foo</string>"
            , " </contents>"
            , " </set>"
            , "</DummyObject-set>"
        );

        DummyObject obj = (DummyObject)StandardObject.fromXML(obj_as_xml_string);
        assertEquals(obj.set.size(),1);
        assert(obj.set.contains("foo"));

        obj.verifyImmutable();
    }
}
package de.tiiunder.flatmate.entities.expense;

import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonRootName;

import de.tiiunder.flatmate.types.DateType;

/**
 * JSON entity describing a single expense record exchanged with the Flatmate
 * server. Numeric fields annotated {@code NON_DEFAULT} are omitted from the
 * serialized JSON while they still hold their default (zero) value.
 */
@JsonRootName("expense")
public class Expense implements Serializable {

    // Fix: a Serializable class without an explicit serialVersionUID gets a
    // compiler-derived UID that silently changes on any edit, breaking Java
    // deserialization of previously stored instances.
    private static final long serialVersionUID = 1L;

    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int id;

    @JsonProperty("user_id")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int userId;

    @JsonProperty("category_id")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int categoryId; // used for GET

    @JsonProperty
    private int category; // used for POST

    @JsonProperty
    private DateType date;

    @JsonProperty
    private String name;

    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private double count;

    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private double consumption;

    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private double deposit;

    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private double fee;

    @JsonProperty("fee_period_count")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int feePeriodCount;

    @JsonProperty("fee_period_type")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int feePeriodType;

    @JsonProperty("cost_per_unit")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private double costPerUnit;

    @JsonProperty("period_count")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int periodCount;

    @JsonProperty("period_type")
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    private int periodType;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public int getUserId() {
        return userId;
    }

    public void setUserId(int userId) {
        this.userId = userId;
    }

    public int getCategoryId() {
        return categoryId;
    }

    public void setCategoryId(int categoryId) {
        this.categoryId = categoryId;
    }

    public int getCategory() {
        return category;
    }

    public void setCategory(int category) {
        this.category = category;
    }

    public DateType getDate() {
        return date;
    }

    public void setDate(DateType date) {
        this.date = date;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public double getCount() {
        return count;
    }

    public void setCount(double count) {
        this.count = count;
    }

    public double getConsumption() {
        return consumption;
    }

    public void setConsumption(double consumption) {
        this.consumption = consumption;
    }

    public double getDeposit() {
        return deposit;
    }

    public void setDeposit(double deposit) {
        this.deposit = deposit;
    }

    public double getFee() {
        return fee;
    }

    public void setFee(double fee) {
        this.fee = fee;
    }

    public int getFeePeriodCount() {
        return feePeriodCount;
    }

    public void setFeePeriodCount(int feePeriodCount) {
        this.feePeriodCount = feePeriodCount;
    }

    public int getFeePeriodType() {
        return feePeriodType;
    }

    public void setFeePeriodType(int feePeriodType) {
        this.feePeriodType = feePeriodType;
    }

    public double getCostPerUnit() {
        return costPerUnit;
    }

    public void setCostPerUnit(double costPerUnit) {
        this.costPerUnit = costPerUnit;
    }

    public int getPeriodCount() {
        return periodCount;
    }

    public void setPeriodCount(int periodCount) {
        this.periodCount = periodCount;
    }

    public int getPeriodType() {
        return periodType;
    }

    public void setPeriodType(int periodType) {
        this.periodType = periodType;
    }

    @Override
    public String toString() {
        return "Expense{" +
                "id=" + id +
                ", userId=" + userId +
                ", categoryId=" + categoryId +
                // Fix: category was previously missing from the diagnostic output.
                ", category=" + category +
                ", date=" + date +
                ", name='" + name + '\'' +
                ", count=" + count +
                ", consumption=" + consumption +
                ", deposit=" + deposit +
                ", fee=" + fee +
                ", feePeriodCount=" + feePeriodCount +
                ", feePeriodType=" + feePeriodType +
                ", costPerUnit=" + costPerUnit +
                ", periodCount=" + periodCount +
                ", periodType=" + periodType +
                '}';
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.impl;

import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.MultiObjectDeleteException;
import com.google.common.collect.Lists;
import org.assertj.core.api.Assertions;
import org.junit.Before;
import org.junit.Test;

import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.S3AFileStatus;
import org.apache.hadoop.fs.s3a.S3AInputPolicy;
import org.apache.hadoop.fs.s3a.S3AInstrumentation;
import org.apache.hadoop.fs.s3a.S3AStorageStatistics;
import org.apache.hadoop.fs.s3a.s3guard.BulkOperationState;
import org.apache.hadoop.fs.s3a.s3guard.DirListingMetadata;
import org.apache.hadoop.fs.s3a.s3guard.ITtlTimeProvider;
import org.apache.hadoop.fs.s3a.s3guard.MetadataStore;
import org.apache.hadoop.fs.s3a.s3guard.PathMetadata;
import org.apache.hadoop.fs.s3a.s3guard.RenameTracker;
import org.apache.hadoop.fs.s3a.s3guard.S3Guard;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.BlockingThreadPoolExecutorService;

import static org.apache.hadoop.fs.s3a.impl.MultiObjectDeleteSupport.ACCESS_DENIED;
import static org.apache.hadoop.fs.s3a.impl.MultiObjectDeleteSupport.removeUndeletedPaths;
import static org.junit.Assert.assertEquals;

/**
 * Unit test suite covering translation of AWS SDK exceptions to S3A exceptions,
 * and retry/recovery policies.
 */
public class TestPartialDeleteFailures {

  private static final ContextAccessors CONTEXT_ACCESSORS
      = new MinimalContextAccessor();

  private StoreContext context;

  // Maps a bare object key to a fully qualified path in the test bucket.
  private static Path qualifyKey(String k) {
    return new Path("s3a://bucket/" + k);
  }

  @Before
  public void setUp() throws Exception {
    context = createMockStoreContext(true, new OperationTrackingStore());
  }

  @Test
  public void testDeleteExtraction() {
    List<Path> src = pathList("a", "a/b", "a/c");
    List<Path> rejected = pathList("a/b");
    MultiObjectDeleteException ex = createDeleteException(ACCESS_DENIED,
        rejected);
    List<Path> undeleted = removeUndeletedPaths(ex, src,
        TestPartialDeleteFailures::qualifyKey);
    assertEquals("mismatch of rejected and undeleted entries",
        rejected, undeleted);
  }

  @Test
  public void testSplitKeysFromResults() throws Throwable {
    List<Path> src = pathList("a", "a/b", "a/c");
    List<Path> rejected = pathList("a/b");
    List<DeleteObjectsRequest.KeyVersion> keys = keysToDelete(src);
    MultiObjectDeleteException ex = createDeleteException(ACCESS_DENIED,
        rejected);
    Pair<List<Path>, List<Path>> pair =
        new MultiObjectDeleteSupport(context, null)
            .splitUndeletedKeys(ex, keys);
    List<Path> undeleted = pair.getLeft();
    List<Path> deleted = pair.getRight();
    assertEquals(rejected, undeleted);
    // now check the deleted list to verify that it is valid
    src.remove(rejected.get(0));
    assertEquals(src, deleted);
  }

  /**
   * Build a list of qualified paths from vararg parameters.
   * @param paths paths to qualify and then convert to a list.
   * @return same paths as a list.
   */
  private List<Path> pathList(String... paths) {
    return Arrays.stream(paths)
        .map(TestPartialDeleteFailures::qualifyKey)
        .collect(Collectors.toList());
  }

  /**
   * Build a delete exception containing all the rejected paths.
   * The list of successful entries is empty.
   * @param code error code attached to every rejected entry.
   * @param rejected the rejected paths.
   * @return a new exception
   */
  private MultiObjectDeleteException createDeleteException(
      final String code,
      final List<Path> rejected) {
    List<MultiObjectDeleteException.DeleteError> errors = rejected.stream()
        .map((p) -> {
          MultiObjectDeleteException.DeleteError e
              = new MultiObjectDeleteException.DeleteError();
          e.setKey(p.toUri().getPath());
          e.setCode(code);
          e.setMessage("forbidden");
          return e;
        }).collect(Collectors.toList());
    return new MultiObjectDeleteException(errors, Collections.emptyList());
  }

  /**
   * From a list of paths, build up the list of keys for a delete request.
   * @param paths path list
   * @return a key list suitable for a delete request.
   */
  public static List<DeleteObjectsRequest.KeyVersion> keysToDelete(
      List<Path> paths) {
    return paths.stream()
        .map((p) -> p.toUri().getPath())
        .map(DeleteObjectsRequest.KeyVersion::new)
        .collect(Collectors.toList());
  }

  /**
   * Verify that on a partial delete, the S3Guard tables are updated
   * with deleted items. And only them.
   */
  @Test
  public void testProcessDeleteFailure() throws Throwable {
    Path pathA = qualifyKey("/a");
    Path pathAB = qualifyKey("/a/b");
    Path pathAC = qualifyKey("/a/c");
    List<Path> src = Lists.newArrayList(pathA, pathAB, pathAC);
    List<DeleteObjectsRequest.KeyVersion> keyList = keysToDelete(src);
    List<Path> deleteForbidden = Lists.newArrayList(pathAB);
    final List<Path> deleteAllowed = Lists.newArrayList(pathA, pathAC);
    MultiObjectDeleteException ex = createDeleteException(ACCESS_DENIED,
        deleteForbidden);
    OperationTrackingStore store = new OperationTrackingStore();
    StoreContext storeContext = createMockStoreContext(true, store);
    MultiObjectDeleteSupport deleteSupport
        = new MultiObjectDeleteSupport(storeContext, null);
    Triple<List<Path>, List<Path>, List<Pair<Path, IOException>>>
        triple = deleteSupport.processDeleteFailure(ex, keyList);
    Assertions.assertThat(triple.getRight())
        .as("failure list")
        .isEmpty();
    List<Path> undeleted = triple.getLeft();
    List<Path> deleted = triple.getMiddle();
    Assertions.assertThat(deleted).
        as("deleted files")
        .containsAll(deleteAllowed)
        .doesNotContainAnyElementsOf(deleteForbidden);
    Assertions.assertThat(undeleted).
        as("undeleted store entries")
        .containsAll(deleteForbidden)
        .doesNotContainAnyElementsOf(deleteAllowed);
  }

  // Builds a StoreContext wired to a tracking metadata store, a single-shot
  // invoker and minimal accessors; no real S3 connection is made.
  private StoreContext createMockStoreContext(boolean multiDelete,
      OperationTrackingStore store) throws URISyntaxException, IOException {
    URI name = new URI("s3a://bucket");
    Configuration conf = new Configuration();
    return new StoreContext(
        name,
        "bucket",
        conf,
        "alice",
        UserGroupInformation.getCurrentUser(),
        BlockingThreadPoolExecutorService.newInstance(
            4,
            4,
            10, TimeUnit.SECONDS,
            "s3a-transfer-shared"),
        Constants.DEFAULT_EXECUTOR_CAPACITY,
        new Invoker(RetryPolicies.TRY_ONCE_THEN_FAIL, Invoker.LOG_EVENT),
        new S3AInstrumentation(name),
        new S3AStorageStatistics(),
        S3AInputPolicy.Normal,
        ChangeDetectionPolicy.createPolicy(ChangeDetectionPolicy.Mode.None,
            ChangeDetectionPolicy.Source.ETag, false),
        multiDelete,
        store,
        false,
        CONTEXT_ACCESSORS,
        new S3Guard.TtlTimeProvider(conf));
  }

  // Context accessors that only support key->path mapping; everything else
  // is unused by these tests.
  private static class MinimalContextAccessor implements ContextAccessors {

    @Override
    public Path keyToPath(final String key) {
      return qualifyKey(key);
    }

    @Override
    public String pathToKey(final Path path) {
      return null;
    }

    @Override
    public File createTempFile(final String prefix, final long size)
        throws IOException {
      // NOTE(review): message literal has a typo ("unsppported"); left
      // unchanged here since this edit is documentation-only.
      throw new UnsupportedOperationException("unsppported");
    }

    @Override
    public String getBucketLocation() throws IOException {
      return null;
    }
  }

  /**
   * MetadataStore which tracks what is deleted and added.
   */
  private static class OperationTrackingStore implements MetadataStore {

    private final List<Path> deleted = new ArrayList<>();

    private final List<Path> created = new ArrayList<>();

    @Override
    public void initialize(final FileSystem fs,
        ITtlTimeProvider ttlTimeProvider) {
    }

    @Override
    public void initialize(final Configuration conf,
        ITtlTimeProvider ttlTimeProvider) {
    }

    @Override
    public void forgetMetadata(final Path path) {
    }

    @Override
    public PathMetadata get(final Path path) {
      return null;
    }

    @Override
    public PathMetadata get(final Path path,
        final boolean wantEmptyDirectoryFlag) {
      return null;
    }

    @Override
    public DirListingMetadata listChildren(final Path path) {
      return null;
    }

    @Override
    public void put(final PathMetadata meta) {
      put(meta, null);
    }

    @Override
    public void put(final PathMetadata meta,
        final BulkOperationState operationState) {
      created.add(meta.getFileStatus().getPath());
    }

    @Override
    public void put(final Collection<? extends PathMetadata> metas,
        final BulkOperationState operationState) {
      metas.stream().forEach(meta -> put(meta, null));
    }

    @Override
    public void put(final DirListingMetadata meta,
        final List<Path> unchangedEntries,
        final BulkOperationState operationState) {
      created.add(meta.getPath());
    }

    @Override
    public void destroy() {
    }

    @Override
    public void delete(final Path path,
        final BulkOperationState operationState) {
      deleted.add(path);
    }

    @Override
    public void deletePaths(final Collection<Path> paths,
        @Nullable final BulkOperationState operationState) throws IOException {
      deleted.addAll(paths);
    }

    @Override
    public void deleteSubtree(final Path path,
        final BulkOperationState operationState) {
    }

    @Override
    public void move(@Nullable final Collection<Path> pathsToDelete,
        @Nullable final Collection<PathMetadata> pathsToCreate,
        @Nullable final BulkOperationState operationState) {
    }

    @Override
    public void prune(final PruneMode pruneMode, final long cutoff) {
    }

    @Override
    public long prune(final PruneMode pruneMode,
        final long cutoff,
        final String keyPrefix) {
      return 0;
    }

    @Override
    public BulkOperationState initiateBulkWrite(
        final BulkOperationState.OperationType operation,
        final Path dest) {
      return null;
    }

    @Override
    public void setTtlTimeProvider(ITtlTimeProvider ttlTimeProvider) {
    }

    @Override
    public Map<String, String> getDiagnostics() {
      return null;
    }

    @Override
    public void updateParameters(final Map<String, String> parameters) {
    }

    @Override
    public void close() {
    }

    public List<Path> getDeleted() {
      return deleted;
    }

    public List<Path> getCreated() {
      return created;
    }

    @Override
    public RenameTracker initiateRenameOperation(
        final StoreContext storeContext,
        final Path source,
        final S3AFileStatus sourceStatus,
        final Path dest) {
      throw new UnsupportedOperationException("unsupported");
    }

    @Override
    public void addAncestors(final Path qualifiedPath,
        @Nullable final BulkOperationState operationState) {
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.io.IOUtils;

import com.google.common.annotations.VisibleForTesting;

/**
 * This class is used by datanodes to maintain meta data of its replicas.
 * It provides a general interface for meta information of a replica.
 */
@InterfaceAudience.Private
abstract public class ReplicaInfo extends Block implements Replica {
  /** volume where the replica belongs */
  private FsVolumeSpi volume;
  /** directory where block & meta files belong */

  /**
   * Base directory containing numerically-identified sub directories and
   * possibly blocks.
   */
  private File baseDir;

  /**
   * Ints representing the sub directory path from base dir to the directory
   * containing this replica.
   */
  private int[] subDirs;

  // Base-directory File objects interned by path so that replicas sharing a
  // base dir also share one File instance (see setDirInternal).
  private static final Map<String, File> internedBaseDirs = new HashMap<String, File>();

  /**
   * Constructor for a zero length replica
   * @param blockId block id
   * @param genStamp replica generation stamp
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(long blockId, long genStamp, FsVolumeSpi vol, File dir) {
    this( blockId, 0L, genStamp, vol, dir);
  }

  /**
   * Constructor
   * @param block a block
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(Block block, FsVolumeSpi vol, File dir) {
    this(block.getBlockId(), block.getNumBytes(),
        block.getGenerationStamp(), vol, dir);
  }

  /**
   * Constructor
   * @param blockId block id
   * @param len replica length
   * @param genStamp replica generation stamp
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(long blockId, long len, long genStamp,
      FsVolumeSpi vol, File dir) {
    super(blockId, len, genStamp);
    this.volume = vol;
    setDirInternal(dir);
  }

  /**
   * Copy constructor.
   * @param from where to copy from
   */
  ReplicaInfo(ReplicaInfo from) {
    this(from, from.getVolume(), from.getDir());
  }

  /**
   * Get the full path of this replica's data file
   * @return the full path of this replica's data file
   */
  public File getBlockFile() {
    return new File(getDir(), getBlockName());
  }

  /**
   * Get the full path of this replica's meta file
   * @return the full path of this replica's meta file
   */
  public File getMetaFile() {
    return new File(getDir(),
        DatanodeUtil.getMetaName(getBlockName(), getGenerationStamp()));
  }

  /**
   * Get the volume where this replica is located on disk
   * @return the volume where this replica is located on disk
   */
  public FsVolumeSpi getVolume() {
    return volume;
  }

  /**
   * Set the volume where this replica is located on disk
   */
  void setVolume(FsVolumeSpi vol) {
    this.volume = vol;
  }

  /**
   * Get the storageUuid of the volume that stores this replica.
   */
  @Override
  public String getStorageUuid() {
    return volume.getStorageID();
  }

  /**
   * Return the parent directory path where this replica is located.
   * The path is reconstructed from baseDir plus the stored subDirs indices.
   * @return the parent directory path where this replica is located
   */
  File getDir() {
    if (subDirs == null) {
      return null;
    }

    StringBuilder sb = new StringBuilder();
    for (int i : subDirs) {
      sb.append(DataStorage.BLOCK_SUBDIR_PREFIX);
      sb.append(i);
      sb.append("/");
    }
    File ret = new File(baseDir, sb.toString());
    return ret;
  }

  /**
   * Set the parent directory where this replica is located
   * @param dir the parent directory where the replica is located
   */
  public void setDir(File dir) {
    setDirInternal(dir);
  }

  // Decomposes dir into an interned base-dir File plus an int[] of subdir
  // indices so many replicas can share one copy of the common path prefix.
  private void setDirInternal(File dir) {
    if (dir == null) {
      subDirs = null;
      baseDir = null;
      return;
    }

    ReplicaDirInfo replicaDirInfo = parseSubDirs(dir);
    this.subDirs = replicaDirInfo.subDirs;

    synchronized (internedBaseDirs) {
      if (!internedBaseDirs.containsKey(replicaDirInfo.baseDirPath)) {
        // Create a new String path of this file and make a brand new File object
        // to guarantee we drop the reference to the underlying char[] storage.
        File baseDir = new File(replicaDirInfo.baseDirPath);
        internedBaseDirs.put(replicaDirInfo.baseDirPath, baseDir);
      }
      this.baseDir = internedBaseDirs.get(replicaDirInfo.baseDirPath);
    }
  }

  @VisibleForTesting
  public static class ReplicaDirInfo {
    @VisibleForTesting
    public String baseDirPath;

    @VisibleForTesting
    public int[] subDirs;
  }

  // Walks up from dir collecting "subdir<N>" components (outermost first)
  // until a non-subdir ancestor is reached; that ancestor is the base dir.
  @VisibleForTesting
  public static ReplicaDirInfo parseSubDirs(File dir) {
    ReplicaDirInfo ret = new ReplicaDirInfo();

    File currentDir = dir;
    List<Integer> subDirList = new ArrayList<Integer>();
    while (currentDir.getName().startsWith(DataStorage.BLOCK_SUBDIR_PREFIX)) {
      // Prepend the integer into the list.
      subDirList.add(0, Integer.parseInt(currentDir.getName().replaceFirst(
          DataStorage.BLOCK_SUBDIR_PREFIX, "")));
      currentDir = currentDir.getParentFile();
    }
    ret.subDirs = new int[subDirList.size()];
    for (int i = 0; i < subDirList.size(); i++) {
      ret.subDirs[i] = subDirList.get(i);
    }

    ret.baseDirPath = currentDir.getAbsolutePath();

    return ret;
  }

  /**
   * check if this replica has already been unlinked.
   * @return true if the replica has already been unlinked
   *         or no need to be detached; false otherwise
   */
  public boolean isUnlinked() {
    return true;                // no need to be unlinked
  }

  /**
   * set that this replica is unlinked
   */
  public void setUnlinked() {
    // no need to be unlinked
  }

  /**
   * Copy specified file into a temporary file. Then rename the
   * temporary file to the original name. This will cause any
   * hardlinks to the original file to be removed. The temporary
   * files are created in the same directory. The temporary files will
   * be recovered (especially on Windows) on datanode restart.
   */
  private void unlinkFile(File file, Block b) throws IOException {
    File tmpFile = DatanodeUtil.createTmpFile(b, DatanodeUtil.getUnlinkTmpFile(file));
    try {
      FileInputStream in = new FileInputStream(file);
      try {
        FileOutputStream out = new FileOutputStream(tmpFile);
        try {
          IOUtils.copyBytes(in, out, 16*1024);
        } finally {
          out.close();
        }
      } finally {
        in.close();
      }
      // Sanity check: a size mismatch means the copy was incomplete.
      if (file.length() != tmpFile.length()) {
        throw new IOException("Copy of file " + file + " size " + file.length()+
                              " into file " + tmpFile +
                              " resulted in a size of " + tmpFile.length());
      }
      FileUtil.replaceFile(tmpFile, file);
    } catch (IOException e) {
      boolean done = tmpFile.delete();
      if (!done) {
        DataNode.LOG.info("detachFile failed to delete temporary file " +
                          tmpFile);
      }
      throw e;
    }
  }

  /**
   * Remove a hard link by copying the block to a temporary place and
   * then moving it back
   * @param numLinks number of hard links
   * @return true if copy is successful;
   *         false if it is already detached or no need to be detached
   * @throws IOException if there is any copy error
   */
  public boolean unlinkBlock(int numLinks) throws IOException {
    if (isUnlinked()) {
      return false;
    }
    File file = getBlockFile();
    if (file == null || getVolume() == null) {
      throw new IOException("detachBlock:Block not found. " + this);
    }
    File meta = getMetaFile();

    // Copy-on-write only when more links exist than the expected count.
    if (HardLink.getLinkCount(file) > numLinks) {
      DataNode.LOG.info("CopyOnWrite for block " + this);
      unlinkFile(file, this);
    }
    if (HardLink.getLinkCount(meta) > numLinks) {
      unlinkFile(meta, this);
    }
    setUnlinked();
    return true;
  }

  /**
   * Set this replica's generation stamp to be a newer one
   * @param newGS new generation stamp
   * @throws IOException if the new generation stamp is not greater than the current one
   */
  void setNewerGenerationStamp(long newGS) throws IOException {
    long curGS = getGenerationStamp();
    if (newGS <= curGS) {
      throw new IOException("New generation stamp (" + newGS
          + ") must be greater than current one (" + curGS + ")");
    }
    setGenerationStamp(newGS);
  }

  @Override  //Object
  public String toString() {
    return getClass().getSimpleName()
        + ", " + super.toString()
        + ", " + getState()
        + "\n  getNumBytes()     = " + getNumBytes()
        + "\n  getBytesOnDisk()  = " + getBytesOnDisk()
        + "\n  getVisibleLength()= " + getVisibleLength()
        + "\n  getVolume()       = " + getVolume()
        + "\n  getBlockFile()    = " + getBlockFile();
  }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.google.android.apps.exposurenotification.nearby;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;

import android.content.Context;
import android.net.Uri;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.apps.exposurenotification.common.ExecutorsModule;
import com.google.android.apps.exposurenotification.common.Qualifiers.BackgroundExecutor;
import com.google.android.apps.exposurenotification.common.Qualifiers.LightweightExecutor;
import com.google.android.apps.exposurenotification.common.Qualifiers.ScheduledExecutor;
import com.google.android.apps.exposurenotification.keydownload.KeyFile;
import com.google.android.apps.exposurenotification.keydownload.KeyFileConstants;
import com.google.android.apps.exposurenotification.proto.TemporaryExposureKeyExport;
import com.google.android.apps.exposurenotification.storage.DbModule;
import com.google.android.apps.exposurenotification.storage.DownloadServerEntity;
import com.google.android.apps.exposurenotification.storage.DownloadServerRepository;
import com.google.android.apps.exposurenotification.storage.ExposureNotificationDatabase;
import com.google.android.apps.exposurenotification.testsupport.ExposureNotificationRules;
import com.google.android.apps.exposurenotification.testsupport.InMemoryDb;
import com.google.android.gms.tasks.Tasks;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Bytes;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.testing.TestingExecutors;
import dagger.hilt.android.qualifiers.ApplicationContext;
import dagger.hilt.android.testing.BindValue;
import dagger.hilt.android.testing.HiltAndroidTest;
import dagger.hilt.android.testing.HiltTestApplication;
import dagger.hilt.android.testing.UninstallModules;
import java.io.File;
import java.io.FileOutputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.inject.Inject;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.robolectric.annotation.Config;

/**
 * Tests for {@link DiagnosisKeyFileSubmitter}, focusing on how it records the most recent
 * successfully submitted keyfile per download server (so later downloads can resume after it).
 *
 * <p>Runs under Robolectric with Hilt injection; the real DB and executor modules are replaced
 * with an in-memory database and direct/same-thread executors so everything executes synchronously
 * on the test thread.
 */
@HiltAndroidTest
@RunWith(AndroidJUnit4.class)
@Config(application = HiltTestApplication.class)
@UninstallModules({
    DbModule.class,
    ExecutorsModule.class,
    ExposureNotificationsClientModule.class
})
public class KeyFileSubmitterTest {

  // Monotonic counter used to give every test-created keyfile a unique filename.
  private static final AtomicInteger UNIQUE_INT = new AtomicInteger(1);
  // Magic header string of the key export binary format, padded to HEADER_LEN bytes on write.
  private static final String HEADER_V1 = "EK Export v1";
  private static final int HEADER_LEN = 16;

  // All executors are bound to direct / same-thread implementations so async work in the SUT
  // completes inline and assertions can run immediately after the futures resolve.
  @BindValue
  @BackgroundExecutor
  static final ExecutorService BACKGROUND_EXEC = MoreExecutors.newDirectExecutorService();
  @BindValue
  @LightweightExecutor
  static final ExecutorService LIGHTWEIGHT_EXEC = MoreExecutors.newDirectExecutorService();
  @BindValue
  @ScheduledExecutor
  static final ScheduledExecutorService SCHEDULED_EXEC =
      TestingExecutors.sameThreadScheduledExecutor();
  @BindValue
  @BackgroundExecutor
  static final ListeningExecutorService BACKGROUND_LISTENING_EXEC =
      MoreExecutors.newDirectExecutorService();
  @BindValue
  @LightweightExecutor
  static final ListeningExecutorService LIGHTWEIGHT_LISTENING_EXEC =
      MoreExecutors.newDirectExecutorService();
  @BindValue
  @ScheduledExecutor
  static final ListeningScheduledExecutorService SCHEDULED_LISTENING_EXEC =
      TestingExecutors.sameThreadScheduledExecutor();

  @Rule
  public ExposureNotificationRules rules = ExposureNotificationRules.forTest(this).withMocks().build();

  // In-memory Room DB so repository writes are observable without on-disk state.
  @BindValue
  ExposureNotificationDatabase db = InMemoryDb.create();
  // Mocked EN client: provideDiagnosisKeys() is stubbed per test (success or failure).
  @BindValue
  @Mock
  ExposureNotificationClientWrapper exposureNotificationClient;

  @Inject
  DownloadServerRepository downloadServerRepo;
  @Inject
  @ApplicationContext
  Context context;

  // The SUT
  @Inject
  DiagnosisKeyFileSubmitter submitter;

  @Before
  public void setUp() {
    rules.hilt().inject();
    // Default stub: submitting keys to the EN client succeeds.
    when(exposureNotificationClient.provideDiagnosisKeys(any())).thenReturn(Tasks.forResult(null));
  }

  @Test
  public void shouldRememberLastSuccessfulFiles_oneForEachServer() throws Exception {
    // GIVEN
    // Two servers (distinguished by their index URIs), each with two files; the second file of
    // each server is flagged as that server's "most recent" (the boolean in KeyFile.create).
    Uri index1 = Uri.parse("http://example-1.com/index");
    Uri index2 = Uri.parse("http://example-2.com/index");
    Uri server1FileUri1 = Uri.parse("http://example-1.com/file1");
    Uri server1FileUri2 = Uri.parse("http://example-1.com/file2");
    Uri server2FileUri1 = Uri.parse("http://example-2.com/file1");
    Uri server2FileUri2 = Uri.parse("http://example-2.com/file2");
    File server1file1 = createFile();
    File server1file2 = createFile();
    File server2file1 = createFile();
    File server2file2 = createFile();
    KeyFile server1keyFile1 = KeyFile.create(index1, server1FileUri1, false).with(server1file1);
    KeyFile server1keyFile2 = KeyFile.create(index1, server1FileUri2, true).with(server1file2);
    KeyFile server2keyFile1 = KeyFile.create(index2, server2FileUri1, false).with(server2file1);
    KeyFile server2keyFile2 = KeyFile.create(index2, server2FileUri2, true).with(server2file2);

    // WHEN
    submitter.submitFiles(
        ImmutableList.of(server1keyFile1, server1keyFile2, server2keyFile1, server2keyFile2))
        .get();

    // THEN
    // Each server independently remembers its own last successful file.
    assertThat(downloadServerRepo.getMostRecentSuccessfulDownload(index1))
        .isEqualTo(server1FileUri2);
    assertThat(downloadServerRepo.getMostRecentSuccessfulDownload(index2))
        .isEqualTo(server2FileUri2);
  }

  @Test
  public void submitFails_shouldRememberPreviouslySuccessfulFiles() throws Exception {
    // GIVEN
    Uri index = Uri.parse("http://example-1.com/index");
    Uri fileUri1 = Uri.parse("http://example-1.com/file1");
    Uri fileUri2 = Uri.parse("http://example-1.com/file2");
    File file1 = createFile();
    File file2 = createFile();
    KeyFile keyFile1 = KeyFile.create(index, fileUri1, true).with(file1);
    KeyFile keyFile2 = KeyFile.create(index, fileUri2, true).with(file2);
    downloadServerRepo.upsert(DownloadServerEntity.create(index, fileUri1));

    // WHEN
    // First submit should succeed and remember the file.
    submitter.submitFiles(ImmutableList.of(keyFile1)).get();
    assertThat(downloadServerRepo.getMostRecentSuccessfulDownload(index)).isEqualTo(fileUri1);
    // Set up the second submit to fail
    when(exposureNotificationClient.provideDiagnosisKeys(any()))
        .thenReturn(Tasks.forException(new RuntimeException("BOOOOOM!")));
    assertThrows(
        ExecutionException.class, () -> submitter.submitFiles(ImmutableList.of(keyFile2)).get());

    // THEN
    // So we should still remember the first file.
    assertThat(downloadServerRepo.getMostRecentSuccessfulDownload(index)).isEqualTo(fileUri1);
  }

  /**
   * Creates a structurally compliant but empty keyfile and writes it to disk.
   *
   * <p>The zip contains the two entries the parser expects: a (fake) signature entry and an
   * export entry whose payload is the 16-byte "EK Export v1" header followed by an empty
   * serialized {@link TemporaryExposureKeyExport} proto.
   */
  private File createFile() throws Exception {
    File outFile =
        new File(
            context.getFilesDir(),
            String.format("test-keyfile-%s.zip", UNIQUE_INT.incrementAndGet()));
    try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(outFile))) {
      ZipEntry signatureEntry = new ZipEntry(KeyFileConstants.SIG_FILENAME);
      ZipEntry exportEntry = new ZipEntry(KeyFileConstants.EXPORT_FILENAME);

      byte[] exportBytes =
          Bytes.concat(
              Strings.padEnd(HEADER_V1, HEADER_LEN, ' ').getBytes(),
              TemporaryExposureKeyExport.getDefaultInstance().toByteArray());

      out.putNextEntry(signatureEntry);
      out.write("signature".getBytes());

      out.putNextEntry(exportEntry);
      out.write(exportBytes);

      return outFile;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.runtime.operators.window.slicing;

import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Meter;
import org.apache.flink.metrics.MeterView;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.state.KeyedStateBackend;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.streaming.api.operators.ChainingStrategy;
import org.apache.flink.streaming.api.operators.InternalTimer;
import org.apache.flink.streaming.api.operators.InternalTimerService;
import org.apache.flink.streaming.api.operators.KeyContext;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.Output;
import org.apache.flink.streaming.api.operators.TimestampedCollector;
import org.apache.flink.streaming.api.operators.Triggerable;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.runtime.operators.TableStreamOperator;
import org.apache.flink.table.runtime.operators.aggregate.window.processors.SliceSharedWindowAggProcessor;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * The {@link SlicingWindowOperator} implements an optimized processing for aligned windows which
 * can apply the slicing optimization. The core idea of slicing optimization is to divide all
 * elements from a data stream into a finite number of non-overlapping chunks (a.k.a. slices).
 *
 * <h3>Concept of Aligned Window and Unaligned Window</h3>
 *
 * <p>We divide windows into 2 categories: Aligned Windows and Unaligned Windows.
 *
 * <p>Aligned Windows are windows have predetermined window boundaries and windows can be divided
 * into finite number of non-overlapping chunks. The boundary of an aligned window is determined
 * independently from the time characteristic of the data stream, or messages it receives. For
 * example, hopping (sliding) window is an aligned window as the window boundaries are predetermined
 * based on the window size and slide. Aligned windows include tumbling, hopping, cumulative
 * windows.
 *
 * <p>Unaligned Windows are windows determined dynamically based on elements. For example, session
 * window is an unaligned window as the window boundaries are determined based on the messages
 * timestamps and their correlations. Currently, unaligned windows include session window only.
 *
 * <p>Because aligned windows can be divided into finite number of non-overlapping chunks (a.k.a.
 * slices), which can apply efficient processing to share intermediate results.
 *
 * <h3>Concept of Slice</h3>
 *
 * <p>Dividing a window of aligned windows into a finite number of non-overlapping chunks, where the
 * chunks are slices. It has the following properties:
 *
 * <ul>
 *   <li>An element must only belong to a single slice.
 *   <li>Slices are non-overlapping, i.e. S_i and S_j should not have any shared elements if i != j.
 *   <li>A window is consist of a finite number of slices.
 * </ul>
 *
 * <h3>Abstraction of Slicing Window Operator</h3>
 *
 * <p>A slicing window operator is a simple wrap of {@link SlicingWindowProcessor}. It delegates all
 * the important methods to the underlying processor, where the processor can have different
 * implementation for aggregate and topk or others.
 *
 * <p>A {@link SlicingWindowProcessor} usually leverages the {@link SliceAssigner} to assign slices
 * and calculate based on the slices. See {@link SliceSharedWindowAggProcessor} as an example.
 *
 * <p>Note: since {@link SlicingWindowProcessor} leverages slicing optimization for aligned windows,
 * therefore, it doesn't support unaligned windows, e.g. session window.
 *
 * <p>Note: currently, {@link SlicingWindowOperator} doesn't support early-fire and late-arrival.
 * Thus late elements (elements belong to emitted windows) will be simply dropped.
 */
@Internal
public final class SlicingWindowOperator<K, W> extends TableStreamOperator<RowData>
        implements OneInputStreamOperator<RowData, RowData>, Triggerable<K, W>, KeyContext {

    private static final long serialVersionUID = 1L;

    private static final String LATE_ELEMENTS_DROPPED_METRIC_NAME = "numLateRecordsDropped";
    private static final String LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME = "lateRecordsDroppedRate";
    private static final String WATERMARK_LATENCY_METRIC_NAME = "watermarkLatency";

    /** The concrete window operator implementation. */
    private final SlicingWindowProcessor<W> windowProcessor;

    // ------------------------------------------------------------------------

    /** This is used for emitting elements with a given timestamp. */
    protected transient TimestampedCollector<RowData> collector;

    /** The service to register timers. */
    private transient InternalTimerService<W> internalTimerService;

    /** The tracked processing time triggered last time. */
    private transient long lastTriggeredProcessingTime;

    /** The operator state to store watermark. */
    private transient ListState<Long> watermarkState;

    // ------------------------------------------------------------------------
    // Metrics
    // ------------------------------------------------------------------------

    private transient Counter numLateRecordsDropped;
    private transient Meter lateRecordsDroppedRate;
    private transient Gauge<Long> watermarkLatency;

    public SlicingWindowOperator(SlicingWindowProcessor<W> windowProcessor) {
        this.windowProcessor = windowProcessor;
        setChainingStrategy(ChainingStrategy.ALWAYS);
    }

    /**
     * Opens the operator: sets up the collector, timer service and the underlying window
     * processor, restores the processor's watermark, and registers the three metrics.
     */
    @Override
    public void open() throws Exception {
        super.open();

        lastTriggeredProcessingTime = Long.MIN_VALUE;
        collector = new TimestampedCollector<>(output);
        // Emitted rows carry no timestamp of their own.
        collector.eraseTimestamp();

        internalTimerService =
                getInternalTimerService(
                        "window-timers", windowProcessor.createWindowSerializer(), this);

        windowProcessor.open(
                new WindowProcessorContext<>(
                        getContainingTask(),
                        getContainingTask().getEnvironment().getMemoryManager(),
                        computeMemorySize(),
                        internalTimerService,
                        getKeyedStateBackend(),
                        collector,
                        getRuntimeContext()));
        // initialize watermark
        windowProcessor.initializeWatermark(currentWatermark);

        // metrics
        this.numLateRecordsDropped = metrics.counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
        this.lateRecordsDroppedRate =
                metrics.meter(
                        LATE_ELEMENTS_DROPPED_RATE_METRIC_NAME,
                        new MeterView(numLateRecordsDropped));
        this.watermarkLatency =
                metrics.gauge(
                        WATERMARK_LATENCY_METRIC_NAME,
                        () -> {
                            long watermark = internalTimerService.currentWatermark();
                            if (watermark < 0) {
                                // No watermark seen yet; report zero latency rather than a
                                // nonsensical negative/huge value.
                                return 0L;
                            } else {
                                return internalTimerService.currentProcessingTime() - watermark;
                            }
                        });
    }

    /**
     * Restores {@code currentWatermark} from the union list state. After rescaling every subtask
     * sees all previous watermarks, so the minimum is the only safe value to resume from.
     */
    @Override
    public void initializeState(StateInitializationContext context) throws Exception {
        super.initializeState(context);
        ListStateDescriptor<Long> watermarkStateDesc =
                new ListStateDescriptor<>("watermark", LongSerializer.INSTANCE);
        this.watermarkState = context.getOperatorStateStore().getUnionListState(watermarkStateDesc);
        if (context.isRestored()) {
            Iterable<Long> watermarks = watermarkState.get();
            if (watermarks != null) {
                Long minWatermark = Long.MAX_VALUE;
                for (Long watermark : watermarks) {
                    minWatermark = Math.min(watermark, minWatermark);
                }
                if (minWatermark != Long.MAX_VALUE) {
                    this.currentWatermark = minWatermark;
                }
            }
        }
    }

    /** Stores the current watermark as the single element of the operator's watermark state. */
    @Override
    public void snapshotState(StateSnapshotContext context) throws Exception {
        super.snapshotState(context);
        this.watermarkState.clear();
        this.watermarkState.add(currentWatermark);
    }

    @Override
    public void close() throws Exception {
        super.close();
        collector = null;
        windowProcessor.close();
    }

    /**
     * Forwards the input row to the window processor under the current key; late elements
     * (rows that the processor reports as dropped) are only counted, never emitted.
     */
    @Override
    public void processElement(StreamRecord<RowData> element) throws Exception {
        RowData inputRow = element.getValue();
        RowData currentKey = (RowData) getCurrentKey();
        boolean isElementDropped = windowProcessor.processElement(currentKey, inputRow);
        if (isElementDropped) {
            // markEvent will increase numLateRecordsDropped
            lateRecordsDroppedRate.markEvent();
        }
    }

    /**
     * Advances the processor's progress BEFORE forwarding the watermark downstream, so buffered
     * data is flushed first. Watermarks that would move time backwards are clamped to the current
     * watermark.
     */
    @Override
    public void processWatermark(Watermark mark) throws Exception {
        if (mark.getTimestamp() > currentWatermark) {
            windowProcessor.advanceProgress(mark.getTimestamp());
            super.processWatermark(mark);
        } else {
            super.processWatermark(new Watermark(currentWatermark));
        }
    }

    @Override
    public void onEventTime(InternalTimer<K, W> timer) throws Exception {
        onTimer(timer);
    }

    @Override
    public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
        if (timer.getTimestamp() > lastTriggeredProcessingTime) {
            // similar to the watermark advance,
            // we need to notify WindowProcessor first to flush buffer into state
            lastTriggeredProcessingTime = timer.getTimestamp();
            windowProcessor.advanceProgress(timer.getTimestamp());
            // timers registered in advanceProgress() should always be smaller than current timer
            // so, it should be safe to trigger current timer straightforwards.
        }
        onTimer(timer);
    }

    /** Fires and then clears the timer's window under the timer's key. */
    private void onTimer(InternalTimer<K, W> timer) throws Exception {
        setCurrentKey(timer.getKey());
        W window = timer.getNamespace();
        windowProcessor.fireWindow(window);
        windowProcessor.clearWindow(window);
        // we don't need to clear window timers,
        // because there should only be one timer for each window now, which is current timer.
    }

    /** Lets the processor flush its in-memory buffers into state before the barrier. */
    @Override
    public void prepareSnapshotPreBarrier(long checkpointId) throws Exception {
        windowProcessor.prepareCheckpoint();
    }

    /** Context implementation for {@link SlicingWindowProcessor.Context}. */
    private static final class WindowProcessorContext<W>
            implements SlicingWindowProcessor.Context<W> {

        private final Object operatorOwner;
        private final MemoryManager memoryManager;
        private final long memorySize;
        private final InternalTimerService<W> timerService;
        private final KeyedStateBackend<RowData> keyedStateBackend;
        private final Output<RowData> collector;
        private final RuntimeContext runtimeContext;

        private WindowProcessorContext(
                Object operatorOwner,
                MemoryManager memoryManager,
                long memorySize,
                InternalTimerService<W> timerService,
                KeyedStateBackend<RowData> keyedStateBackend,
                Output<RowData> collector,
                RuntimeContext runtimeContext) {
            this.operatorOwner = operatorOwner;
            this.memoryManager = memoryManager;
            this.memorySize = memorySize;
            this.timerService = timerService;
            this.keyedStateBackend = checkNotNull(keyedStateBackend);
            this.collector = checkNotNull(collector);
            this.runtimeContext = checkNotNull(runtimeContext);
        }

        @Override
        public Object getOperatorOwner() {
            return operatorOwner;
        }

        @Override
        public MemoryManager getMemoryManager() {
            return memoryManager;
        }

        @Override
        public long getMemorySize() {
            return memorySize;
        }

        @Override
        public KeyedStateBackend<RowData> getKeyedStateBackend() {
            return keyedStateBackend;
        }

        @Override
        public InternalTimerService<W> getTimerService() {
            return timerService;
        }

        @Override
        public void output(RowData result) {
            collector.collect(result);
        }

        @Override
        public RuntimeContext getRuntimeContext() {
            return runtimeContext;
        }
    }

    // ------------------------------------------------------------------------------
    // Visible For Testing
    // ------------------------------------------------------------------------------

    @VisibleForTesting
    public Counter getNumLateRecordsDropped() {
        return numLateRecordsDropped;
    }

    @VisibleForTesting
    public Gauge<Long> getWatermarkLatency() {
        return watermarkLatency;
    }
}
/*
 * Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package javax.swing.plaf.nimbus;

import java.awt.Graphics;
import java.awt.Image;
import java.awt.Insets;

/**
 * ImageScalingHelper
 *
 * <p>Paints an image into a destination rectangle either centered, tiled, or split into a 3x3
 * grid of regions ("nine-patch" style) where the edges and center are stretched or tiled while
 * the corners are drawn at their natural size.
 *
 * @author Created by Jasper Potts (Aug 8, 2007)
 */
class ImageScalingHelper {

    /** Enumeration for the types of painting this class can handle. */
    enum PaintType {
        /**
         * Painting type indicating the image should be centered in the space provided.  When used the <code>mask</code>
         * is ignored.
         */
        CENTER,

        /**
         * Painting type indicating the image should be tiled across the specified width and height.  When used the
         * <code>mask</code> is ignored.
         */
        TILE,

        /**
         * Painting type indicating the image should be split into nine regions with the top, left, bottom and right
         * areas stretched.
         */
        PAINT9_STRETCH,

        /**
         * Painting type indicating the image should be split into nine regions with the top, left, bottom and right
         * areas tiled.
         */
        PAINT9_TILE
    }

    ;

    private static final Insets EMPTY_INSETS = new Insets(0, 0, 0, 0);

    // Bit flags selecting which of the nine regions to paint (combined via |).
    static final int PAINT_TOP_LEFT = 1;
    static final int PAINT_TOP = 2;
    static final int PAINT_TOP_RIGHT = 4;
    static final int PAINT_LEFT = 8;
    static final int PAINT_CENTER = 16;
    static final int PAINT_RIGHT = 32;
    static final int PAINT_BOTTOM_RIGHT = 64;
    static final int PAINT_BOTTOM = 128;
    static final int PAINT_BOTTOM_LEFT = 256;
    /**
     * Specifies that all regions should be painted.  If this is set any other regions specified will not be painted.
     * For example PAINT_ALL | PAINT_CENTER will paint all but the center.
     */
    static final int PAINT_ALL = 512;

    /**
     * Paints using the algorightm specified by <code>paintType</code>.
     *
     * @param g         Graphics to render to
     * @param x         X-coordinate
     * @param y         Y-coordinate
     * @param w         Width to render to
     * @param h         Height to render to
     * @param image     Image to render from, if <code>null</code> this method will do nothing
     * @param sInsets   Insets specifying the portion of the image that will be stretched or tiled, if <code>null</code>
     *                  empty <code>Insets</code> will be used.
     * @param dInsets   Destination insets specifying the portion of the image will be stretched or tiled, if
     *                  <code>null</code> empty <code>Insets</code> will be used.
     * @param paintType Specifies what type of algorithm to use in painting
     * @param mask      Specifies portion of image to render, if <code>PAINT_ALL</code> is specified, any other regions
     *                  specified will not be painted, for example PAINT_ALL | PAINT_CENTER paints everything but the
     *                  center.
     */
    public static void paint(Graphics g, int x, int y, int w, int h,
                             Image image, Insets sInsets,
                             Insets dInsets, PaintType paintType, int mask) {
        // Nothing to do for a missing or degenerate image.
        if (image == null || image.getWidth(null) <= 0 || image.getHeight(null) <= 0) {
            return;
        }
        if (sInsets == null) {
            sInsets = EMPTY_INSETS;
        }
        if (dInsets == null) {
            dInsets = EMPTY_INSETS;
        }
        int iw = image.getWidth(null);
        int ih = image.getHeight(null);

        if (paintType == PaintType.CENTER) {
            // Center the image
            g.drawImage(image, x + (w - iw) / 2,
                        y + (h - ih) / 2, null);
        }
        else if (paintType == PaintType.TILE) {
            // Tile the image
            int lastIY = 0;
            for (int yCounter = y, maxY = y + h; yCounter < maxY;
                 yCounter += (ih - lastIY), lastIY = 0) {
                int lastIX = 0;
                for (int xCounter = x, maxX = x + w; xCounter < maxX;
                     xCounter += (iw - lastIX), lastIX = 0) {
                    int dx2 = Math.min(maxX, xCounter + iw - lastIX);
                    int dy2 = Math.min(maxY, yCounter + ih - lastIY);
                    g.drawImage(image, xCounter, yCounter, dx2, dy2,
                                lastIX, lastIY, lastIX + dx2 - xCounter,
                                lastIY + dy2 - yCounter, null);
                }
            }
        }
        else {
            // Nine-region paint: s* are source insets (in the image), d* are destination insets.
            int st = sInsets.top;
            int sl = sInsets.left;
            int sb = sInsets.bottom;
            int sr = sInsets.right;

            int dt = dInsets.top;
            int dl = dInsets.left;
            int db = dInsets.bottom;
            int dr = dInsets.right;

            // Constrain the insets to the size of the image
            if (st + sb > ih) {
                db = dt = sb = st = Math.max(0, ih / 2);
            }
            if (sl + sr > iw) {
                dl = dr = sl = sr = Math.max(0, iw / 2);
            }

            // Constrain the insets to the size of the region we're painting
            // in.
            if (dt + db > h) {
                dt = db = Math.max(0, h / 2 - 1);
            }
            if (dl + dr > w) {
                dl = dr = Math.max(0, w / 2 - 1);
            }

            boolean stretch = (paintType == PaintType.PAINT9_STRETCH);
            if ((mask & PAINT_ALL) != 0) {
                // PAINT_ALL inverts the remaining bits: paint everything EXCEPT the listed regions.
                mask = (PAINT_ALL - 1) & ~mask;
            }
            // Edges are stretched or tiled via drawChunk; corners are drawn 1:1 via drawImage.
            if ((mask & PAINT_LEFT) != 0) {
                drawChunk(image, g, stretch, x, y + dt, x + dl, y + h - db, 0,
                          st, sl, ih - sb, false);
            }
            if ((mask & PAINT_TOP_LEFT) != 0) {
                drawImage(image, g, x, y, x + dl, y + dt, 0, 0, sl, st);
            }
            if ((mask & PAINT_TOP) != 0) {
                drawChunk(image, g, stretch, x + dl, y, x + w - dr, y + dt,
                          sl, 0, iw - sr, st, true);
            }
            if ((mask & PAINT_TOP_RIGHT) != 0) {
                drawImage(image, g, x + w - dr, y, x + w, y + dt, iw - sr, 0,
                          iw, st);
            }
            if ((mask & PAINT_RIGHT) != 0) {
                drawChunk(image, g, stretch, x + w - dr, y + dt, x + w,
                          y + h - db, iw - sr, st, iw, ih - sb, false);
            }
            if ((mask & PAINT_BOTTOM_RIGHT) != 0) {
                drawImage(image, g, x + w - dr, y + h - db, x + w, y + h,
                          iw - sr, ih - sb, iw, ih);
            }
            if ((mask & PAINT_BOTTOM) != 0) {
                drawChunk(image, g, stretch, x + dl, y + h - db, x + w - dr,
                          y + h, sl, ih - sb, iw - sr, ih, true);
            }
            if ((mask & PAINT_BOTTOM_LEFT) != 0) {
                drawImage(image, g, x, y + h - db, x + dl, y + h, 0, ih - sb,
                          sl, ih);
            }
            if ((mask & PAINT_CENTER) != 0) {
                drawImage(image, g, x + dl, y + dt, x + w - dr, y + h - db,
                          sl, st, iw - sr, ih - sb);
            }
        }
    }

    /**
     * Draws a portion of an image, stretched or tiled.
     *
     * @param image Image to render.
     * @param g Graphics to render to
     * @param stretch Whether the image should be stretched or timed in the
     *                provided space.
     * @param dx1 X origin to draw to
     * @param dy1 Y origin to draw to
     * @param dx2 End x location to draw to
     * @param dy2 End y location to draw to
     * @param sx1 X origin to draw from
     * @param sy1 Y origin to draw from
     * @param sx2 Max x location to draw from
     * @param sy2 Max y location to draw from
     * @param xDirection Used if the image is not stretched. If true it
     *        indicates the image should be tiled along the x axis.
     */
    private static void drawChunk(Image image, Graphics g, boolean stretch,
                                  int dx1, int dy1, int dx2, int dy2, int sx1,
                                  int sy1, int sx2, int sy2,
                                  boolean xDirection) {
        if (dx2 - dx1 <= 0 || dy2 - dy1 <= 0 || sx2 - sx1 <= 0 || sy2 - sy1
                                                                 <= 0) {
            // Bogus location, nothing to paint
            return;
        }
        if (stretch) {
            g.drawImage(image, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null);
        }
        else {
            // Tile the source chunk along one axis until the destination span is covered;
            // the final tile is clipped to the destination bounds.
            int xSize = sx2 - sx1;
            int ySize = sy2 - sy1;
            int deltaX;
            int deltaY;

            if (xDirection) {
                deltaX = xSize;
                deltaY = 0;
            }
            else {
                deltaX = 0;
                deltaY = ySize;
            }
            while (dx1 < dx2 && dy1 < dy2) {
                int newDX2 = Math.min(dx2, dx1 + xSize);
                int newDY2 = Math.min(dy2, dy1 + ySize);

                g.drawImage(image, dx1, dy1, newDX2, newDY2,
                            sx1, sy1, sx1 + newDX2 - dx1,
                            sy1 + newDY2 - dy1, null);
                dx1 += deltaX;
                dy1 += deltaY;
            }
        }
    }

    /**
     * Draws the given source rectangle of the image into the given destination rectangle,
     * skipping the call entirely when either rectangle is empty or inverted.
     */
    private static void drawImage(Image image, Graphics g,
                                  int dx1, int dy1, int dx2, int dy2, int sx1,
                                  int sy1, int sx2, int sy2) {
        // PENDING: is this necessary, will G2D do it for me?
        if (dx2 - dx1 <= 0 || dy2 - dy1 <= 0 || sx2 - sx1 <= 0 ||
                              sy2 - sy1 <= 0) {
            // Bogus location, nothing to paint
            return;
        }
        g.drawImage(image, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null);
    }
}
/*
 * Copyright 2015 Martin Strouhal.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.microrisc.simply.iqrf.dpa.v22x.examples.std_per.frc;

import com.microrisc.simply.CallRequestProcessingState;
import com.microrisc.simply.Network;
import com.microrisc.simply.Node;
import com.microrisc.simply.Simply;
import com.microrisc.simply.SimplyException;
import com.microrisc.simply.errors.CallRequestProcessingError;
import com.microrisc.simply.iqrf.dpa.v22x.DPA_SimplyFactory;
import com.microrisc.simply.iqrf.dpa.v22x.devices.FRC;
import com.microrisc.simply.iqrf.dpa.v22x.types.FRC_Configuration;
import com.microrisc.simply.iqrf.dpa.v22x.types.FRC_Data;
import com.microrisc.simply.iqrf.dpa.v22x.types.FRC_ResponseTime;
import com.microrisc.simply.iqrf.dpa.v22x.types.FRC_Temperature_18B20Idle;
import com.microrisc.simply.iqrf.types.VoidType;
import java.io.File;
import java.util.Comparator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

/**
 * This example shows, how to find out response time on FRC_Temperature Dallas
 * command, how to set maximal response time in FRC configuration and finally,
 * shows sending FRC command to get temperature from all nodes over FRC.
 *
 * @author Martin Strouhal
 */
public class SendAndExtraResultDallas18B20 {

    // reference to Simply
    private static Simply simply = null;

    /** Prints out specified error description, destroys the Simply and exits. */
    private static void printMessageAndExit(String errorDescr) {
        System.out.println(errorDescr);
        if (simply != null) {
            simply.destroy();
        }
        System.exit(1);
    }

    /**
     * Processes a NULL result of an FRC call: distinguishes a processing error
     * from a call that simply has not completed, then exits with a message.
     */
    private static void processNullResult(FRC frc, String errorMsg, String notProcMsg) {
        CallRequestProcessingState procState = frc.getCallRequestProcessingStateOfLastCall();
        if (procState == CallRequestProcessingState.ERROR) {
            CallRequestProcessingError error = frc.getCallRequestProcessingErrorOfLastCall();
            printMessageAndExit(errorMsg + ": " + error);
        } else {
            printMessageAndExit(notProcMsg + ": " + procState);
        }
    }

    /** Puts together both parts of incoming FRC data (main result + extra result). */
    private static short[] getCompleteFrcData(short[] firstPart, short[] extraData) {
        short[] completeData = new short[firstPart.length + extraData.length];
        System.arraycopy(firstPart, 0, completeData, 0, firstPart.length);
        System.arraycopy(extraData, 0, completeData, firstPart.length, extraData.length);
        return completeData;
    }

    /** Compares node IDs numerically (e.g. "2" before "10"), not lexicographically. */
    private static class NodeIdComparator implements Comparator<String> {
        @Override
        public int compare(String nodeIdStr1, String nodeIdStr2) {
            int nodeId_1 = Integer.decode(nodeIdStr1);
            int nodeId_2 = Integer.decode(nodeIdStr2);
            return Integer.compare(nodeId_1, nodeId_2);
        }
    }

    // Node Id comparator
    private static final NodeIdComparator nodeIdComparator = new NodeIdComparator();

    /** Sorts specified results according to node ID in ascendent manner. */
    private static SortedMap<String, FRC_Temperature_18B20Idle.Result> sortResult(
            Map<String, FRC_Temperature_18B20Idle.Result> result
    ) {
        TreeMap<String, FRC_Temperature_18B20Idle.Result> sortedResult
                = new TreeMap<>(nodeIdComparator);
        sortedResult.putAll(result);
        return sortedResult;
    }

    public static void main(String[] args) {
        // creating Simply instance
        try {
            simply = DPA_SimplyFactory.getSimply(
                    "config" + File.separator + "Simply.properties");
        } catch (SimplyException ex) {
            printMessageAndExit("Error while creating Simply: " + ex.getMessage());
        }

        // getting network 1
        Network network1 = simply.getNetwork("1", Network.class);
        if (network1 == null) {
            printMessageAndExit("Network 1 doesn't exist");
        }

        // getting a master node
        Node master = network1.getNode("0");
        if (master == null) {
            printMessageAndExit("Master doesn't exist");
        }

        // getting FRC interface
        FRC frc = master.getDeviceObject(FRC.class);
        if (frc == null) {
            printMessageAndExit("FRC doesn't exist or is not enabled");
        }

        // For FRC peripheral must be set timeout:
        // 1) For typical standard FRC (can transfer up to 2B to the nodes) duration is lower than:
        //    timeout = Bonded Nodes x 130 + _RESPONSE_FRC_TIME_xxx_MS + 250 [ms]
        // 2) Typical advanced FRC (can transfer up to 30B to the nodes) duration is lower than:
        //    timeout for STD mode = Bonded Nodes x 150 + _RESPONSE_FRC_TIME_xxx_MS + 290 [ms].
        //    timeout for LP mode = Bonded Nodes x 200 + _RESPONSE_FRC_TIME_xxx_MS + 390 [ms].
        // eg. for 5 bonded nodes and FRC response time 640ms
        //     + overhead for Java framework = 2s
        //
        // BUGFIX: the whole timeout is computed in milliseconds, but the original code added a
        // framework overhead of 2 (i.e. 2 ms) even though the comment above documents it as 2 s.
        // Use 2000 ms so the configured waiting timeout actually includes the intended margin.
        long overheadMillis = 2000;
        boolean std = true; // indicates if is used STD or LP mode
        long timeout = overheadMillis + 5 * (std ? 150 : 200)
                + (long) FRC_Configuration.FRC_RESPONSE_TIME.TIME_640_MS.getRepsonseTimeInInt()
                + (std ? 290 : 390);
        frc.setDefaultWaitingTimeout(timeout);

        // determining FRC configuration and getting configuration from first node with Dallas
        FRC_Configuration config = sendFRCResponseTime(frc);

        // sending of determined FRC configuration
        VoidType paramsResult = frc.setFRCParams(config);
        if (paramsResult == null) {
            processNullResult(frc, "Setting FRC params failed",
                    "Setting FRC params hasn't been processed yet"
            );
        }

        // getting temperature and its printing
        sendFRCTemperature(frc);

        // end working with Simply
        simply.destroy();
    }

    /**
     * Sends the FRC command that discovers the required response time and returns the
     * corresponding FRC configuration read from node "1" (the node with the Dallas sensor).
     */
    private static FRC_Configuration sendFRCResponseTime(FRC frc) {
        // sending frc cmd for find response time, 0xC0 is ID of dallas temperature FRC
        FRC_Data frcData = frc.send(new FRC_ResponseTime(0xC0));
        if (frcData == null) {
            processNullResult(frc, "Sending FRC command failed",
                    "Sending FRC command hasn't been processed yet"
            );
        }

        // getting extra result
        short[] frcExtraData = frc.extraResult();
        if (frcExtraData == null) {
            processNullResult(frc, "Setting FRC extra result failed",
                    "Setting FRC extra result hasn't been processed yet"
            );
        }

        // save result to map
        Map<String, FRC_ResponseTime.Result> result = null;
        try {
            result = FRC_ResponseTime.parse(
                    getCompleteFrcData(frcData.getData(), frcExtraData)
            );
        } catch (Exception ex) {
            printMessageAndExit("Parsing result data failed: " + ex);
        }

        // node with Dallas
        FRC_ResponseTime.Result timeResult = result.get("1");

        // return configuration with response time
        return timeResult.getResponseTimeAsConfiguration();
    }

    /**
     * Sends the Dallas 18B20 temperature FRC command, merges main and extra results, and prints
     * the temperature of each node sorted by numeric node ID.
     */
    private static void sendFRCTemperature(FRC frc) {
        // sending frc cmd
        FRC_Data frcData = frc.send(new FRC_Temperature_18B20Idle());
        if (frcData == null) {
            processNullResult(frc, "Sending FRC command failed",
                    "Sending FRC command hasn't been processed yet"
            );
        }

        short[] frcExtraData = frc.extraResult();
        if (frcExtraData == null) {
            processNullResult(frc, "Setting FRC extra result failed",
                    "Setting FRC extra result hasn't been processed yet"
            );
        }

        Map<String, FRC_Temperature_18B20Idle.Result> result = null;
        try {
            result = FRC_Temperature_18B20Idle.parse(
                    getCompleteFrcData(frcData.getData(), frcExtraData)
            );
        } catch (Exception ex) {
            printMessageAndExit("Parsing result data failed: " + ex);
        }

        // sort the results
        SortedMap<String, FRC_Temperature_18B20Idle.Result> sortedResult = sortResult(result);

        // printing temperature on each node
        for (Map.Entry<String, FRC_Temperature_18B20Idle.Result> dataEntry
                : sortedResult.entrySet()) {
            System.out.println("Node: " + dataEntry.getKey()
                    + ", temperature: " + dataEntry.getValue().getFormattedTemperature());
        }
    }
}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment.incremental; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Enums; import com.google.common.base.Function; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import io.druid.collections.NonBlockingPool; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; import 
io.druid.query.groupby.RowBasedColumnSelectorFactory; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.DimensionHandler; import io.druid.segment.DimensionHandlerUtils; import io.druid.segment.DimensionIndexer; import io.druid.segment.DimensionSelector; import io.druid.segment.DoubleColumnSelector; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; import io.druid.segment.Metadata; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.VirtualColumns; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilitiesImpl; import io.druid.segment.column.ValueType; import io.druid.segment.serde.ComplexMetricExtractor; import io.druid.segment.serde.ComplexMetricSerde; import io.druid.segment.serde.ComplexMetrics; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import java.io.Closeable; import java.lang.reflect.Array; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Deque; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicInteger; /** */ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>, Closeable { private volatile DateTime maxIngestedEventTime; // Used to discover ValueType based on the class of values in a row // Also used to convert between the duplicate ValueType enums in DimensionSchema (druid-api) and main druid. 
private static final Map<Object, ValueType> TYPE_MAP = ImmutableMap.<Object, ValueType>builder()
      .put(Long.class, ValueType.LONG)
      .put(Double.class, ValueType.DOUBLE)
      .put(Float.class, ValueType.FLOAT)
      .put(String.class, ValueType.STRING)
      .put(DimensionSchema.ValueType.LONG, ValueType.LONG)
      .put(DimensionSchema.ValueType.FLOAT, ValueType.FLOAT)
      .put(DimensionSchema.ValueType.STRING, ValueType.STRING)
      .put(DimensionSchema.ValueType.DOUBLE, ValueType.DOUBLE)
      .build();

  /**
   * Column selector used at ingestion time for inputs to aggregators.
   *
   * Primitive-typed (non-complex) metrics are served straight from the underlying row-based
   * factory; complex metrics are routed through the type's {@link ComplexMetricExtractor}
   * unless {@code deserializeComplexMetrics} is false.
   *
   * @param virtualColumns virtual columns used to wrap the resulting factory
   * @param agg the aggregator
   * @param in ingestion-time input row supplier
   * @param deserializeComplexMetrics whether complex objects should be deserialized by a {@link ComplexMetricExtractor}
   *
   * @return column selector factory
   */
  public static ColumnSelectorFactory makeColumnSelectorFactory(
      final VirtualColumns virtualColumns,
      final AggregatorFactory agg,
      final Supplier<InputRow> in,
      final boolean deserializeComplexMetrics
  )
  {
    final RowBasedColumnSelectorFactory baseSelectorFactory = RowBasedColumnSelectorFactory.create(in, null);

    class IncrementalIndexInputRowColumnSelectorFactory implements ColumnSelectorFactory
    {
      @Override
      public LongColumnSelector makeLongColumnSelector(final String columnName)
      {
        return baseSelectorFactory.makeLongColumnSelector(columnName);
      }

      @Override
      public FloatColumnSelector makeFloatColumnSelector(final String columnName)
      {
        return baseSelectorFactory.makeFloatColumnSelector(columnName);
      }

      @Override
      public ObjectColumnSelector makeObjectColumnSelector(final String column)
      {
        final String typeName = agg.getTypeName();
        final ObjectColumnSelector rawColumnSelector = baseSelectorFactory.makeObjectColumnSelector(column);

        // Known primitive ValueType (other than COMPLEX), or caller opted out of
        // deserialization: hand back the raw selector untouched.
        if ((Enums.getIfPresent(ValueType.class, StringUtils.toUpperCase(typeName)).isPresent()
             && !typeName.equalsIgnoreCase(ValueType.COMPLEX.name()))
            || !deserializeComplexMetrics) {
          return rawColumnSelector;
        } else {
          final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
          if (serde == null) {
            throw new ISE("Don't know how to handle type[%s]", typeName);
          }

          final ComplexMetricExtractor extractor = serde.getExtractor();
          return new ObjectColumnSelector()
          {
            @Override
            public Class classOfObject()
            {
              return extractor.extractedClass();
            }

            @Override
            public Object get()
            {
              // Extract the complex value from the current ingestion row.
              return extractor.extractValue(in.get(), column);
            }
          };
        }
      }

      @Override
      public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec)
      {
        return baseSelectorFactory.makeDimensionSelector(dimensionSpec);
      }

      @Nullable
      @Override
      public ColumnCapabilities getColumnCapabilities(String columnName)
      {
        return baseSelectorFactory.getColumnCapabilities(columnName);
      }

      @Override
      public DoubleColumnSelector makeDoubleColumnSelector(String columnName)
      {
        return baseSelectorFactory.makeDoubleColumnSelector(columnName);
      }
    }

    return virtualColumns.wrap(new IncrementalIndexInputRowColumnSelectorFactory());
  }

  private final long minTimestamp;
  private final Granularity gran;
  private final boolean rollup;
  private final List<Function<InputRow, InputRow>> rowTransformers;
  private final VirtualColumns virtualColumns;
  private final AggregatorFactory[] metrics;
  private final AggregatorType[] aggs;
  private final boolean deserializeComplexMetrics;
  private final boolean reportParseExceptions;
  private final Metadata metadata;

  // Metric/dimension descriptors, keyed by name; insertion order is column order.
  private final Map<String, MetricDesc> metricDescs;
  private final Map<String, DimensionDesc> dimensionDescs;
  private final List<DimensionDesc> dimensionDescsList;
  private final Map<String, ColumnCapabilitiesImpl> columnCapabilities;

  // Number of rows currently in the index; incremented inside addToFacts().
  private final AtomicInteger numEntries = new AtomicInteger();

  // This is modified on add() in a critical section.
  private final ThreadLocal<InputRow> in = new ThreadLocal<>();
  private final Supplier<InputRow> rowSupplier = in::get;

  /**
   * Setting deserializeComplexMetrics to false is necessary for intermediate aggregation such as groupBy that
   * should not deserialize input columns using ComplexMetricSerde for aggregators that return complex metrics.
*
   * Set concurrentEventAdd to true to indicate that adding of input row should be thread-safe (for example, groupBy
   * where the multiple threads can add concurrently to the IncrementalIndex).
   *
   * @param incrementalIndexSchema    the schema to use for incremental index
   * @param deserializeComplexMetrics flag whether or not to call ComplexMetricExtractor.extractValue() on the input
   *                                  value for aggregators that return metrics other than float.
   * @param reportParseExceptions     flag whether or not to report ParseExceptions that occur while extracting values
   *                                  from input rows
   * @param concurrentEventAdd        flag whether or not adding of input rows should be thread-safe
   */
  protected IncrementalIndex(
      final IncrementalIndexSchema incrementalIndexSchema,
      final boolean deserializeComplexMetrics,
      final boolean reportParseExceptions,
      final boolean concurrentEventAdd
  )
  {
    this.minTimestamp = incrementalIndexSchema.getMinTimestamp();
    this.gran = incrementalIndexSchema.getGran();
    this.rollup = incrementalIndexSchema.isRollup();
    this.virtualColumns = incrementalIndexSchema.getVirtualColumns();
    this.metrics = incrementalIndexSchema.getMetrics();
    this.rowTransformers = new CopyOnWriteArrayList<>();
    this.deserializeComplexMetrics = deserializeComplexMetrics;
    this.reportParseExceptions = reportParseExceptions;

    this.columnCapabilities = Maps.newHashMap();
    this.metadata = new Metadata()
        .setAggregators(getCombiningAggregators(metrics))
        .setTimestampSpec(incrementalIndexSchema.getTimestampSpec())
        .setQueryGranularity(this.gran)
        .setRollup(this.rollup);

    // Subclass hook: creates the per-metric aggregator slots (on-heap or off-heap).
    this.aggs = initAggs(metrics, rowSupplier, deserializeComplexMetrics, concurrentEventAdd);

    this.metricDescs = Maps.newLinkedHashMap();
    for (AggregatorFactory metric : metrics) {
      MetricDesc metricDesc = new MetricDesc(metricDescs.size(), metric);
      metricDescs.put(metricDesc.getName(), metricDesc);
      columnCapabilities.put(metricDesc.getName(), metricDesc.getCapabilities());
    }

    DimensionsSpec dimensionsSpec = incrementalIndexSchema.getDimensionsSpec();
    this.dimensionDescs = Maps.newLinkedHashMap();

    this.dimensionDescsList = new ArrayList<>();
    for (DimensionSchema dimSchema : dimensionsSpec.getDimensions()) {
      ValueType type = TYPE_MAP.get(dimSchema.getValueType());
      String dimName = dimSchema.getName();
      ColumnCapabilitiesImpl capabilities = makeCapabilitesFromValueType(type);
      if (dimSchema.getTypeName().equals(DimensionSchema.SPATIAL_TYPE_NAME)) {
        // Spatial dimensions are handled by the row transformer below, not a DimensionDesc.
        capabilities.setHasSpatialIndexes(true);
      } else {
        DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(
            dimName,
            capabilities,
            dimSchema.getMultiValueHandling()
        );
        addNewDimension(dimName, capabilities, handler);
      }
      columnCapabilities.put(dimName, capabilities);
    }

    //__time capabilities
    ColumnCapabilitiesImpl timeCapabilities = new ColumnCapabilitiesImpl();
    timeCapabilities.setType(ValueType.LONG);
    columnCapabilities.put(Column.TIME_COLUMN_NAME, timeCapabilities);

    // This should really be more generic
    List<SpatialDimensionSchema> spatialDimensions = dimensionsSpec.getSpatialDimensions();
    if (!spatialDimensions.isEmpty()) {
      this.rowTransformers.add(new SpatialDimensionRowTransformer(spatialDimensions));
    }
  }

  /**
   * Builder for concrete {@code IncrementalIndex} implementations; see
   * {@code buildOnheap()} / {@code buildOffheap(...)}.
   */
  public static class Builder
  {
    private IncrementalIndexSchema incrementalIndexSchema;
    private boolean deserializeComplexMetrics;
    private boolean reportParseExceptions;
    private boolean concurrentEventAdd;
    private boolean sortFacts;
    private int maxRowCount;

    public Builder()
    {
      // Defaults: schema must be set explicitly; parse errors reported; facts sorted.
      incrementalIndexSchema = null;
      deserializeComplexMetrics = true;
      reportParseExceptions = true;
      concurrentEventAdd = false;
      sortFacts = true;
      maxRowCount = 0;
    }

    public Builder setIndexSchema(final IncrementalIndexSchema incrementalIndexSchema)
    {
      this.incrementalIndexSchema = incrementalIndexSchema;
      return this;
    }

    /**
     * A helper method to set a simple index schema with only metrics and default values for the other parameters. Note
     * that this method is normally used for testing and benchmarking; it is unlikely that you would use it in
     * production settings.
*
     * @param metrics variable array of {@link AggregatorFactory} metrics
     *
     * @return this
     */
    @VisibleForTesting
    public Builder setSimpleTestingIndexSchema(final AggregatorFactory... metrics)
    {
      this.incrementalIndexSchema = new IncrementalIndexSchema.Builder()
          .withMetrics(metrics)
          .build();
      return this;
    }

    public Builder setDeserializeComplexMetrics(final boolean deserializeComplexMetrics)
    {
      this.deserializeComplexMetrics = deserializeComplexMetrics;
      return this;
    }

    public Builder setReportParseExceptions(final boolean reportParseExceptions)
    {
      this.reportParseExceptions = reportParseExceptions;
      return this;
    }

    public Builder setConcurrentEventAdd(final boolean concurrentEventAdd)
    {
      this.concurrentEventAdd = concurrentEventAdd;
      return this;
    }

    public Builder setSortFacts(final boolean sortFacts)
    {
      this.sortFacts = sortFacts;
      return this;
    }

    public Builder setMaxRowCount(final int maxRowCount)
    {
      this.maxRowCount = maxRowCount;
      return this;
    }

    /** Builds an on-heap index; requires schema and a positive max row count. */
    public IncrementalIndex buildOnheap()
    {
      if (maxRowCount <= 0) {
        throw new IllegalArgumentException("Invalid max row count: " + maxRowCount);
      }

      return new OnheapIncrementalIndex(
          Objects.requireNonNull(incrementalIndexSchema, "incrementIndexSchema is null"),
          deserializeComplexMetrics,
          reportParseExceptions,
          concurrentEventAdd,
          sortFacts,
          maxRowCount
      );
    }

    /** Builds an off-heap index backed by {@code bufferPool}; requires schema and a positive max row count. */
    public IncrementalIndex buildOffheap(final NonBlockingPool<ByteBuffer> bufferPool)
    {
      if (maxRowCount <= 0) {
        throw new IllegalArgumentException("Invalid max row count: " + maxRowCount);
      }

      return new OffheapIncrementalIndex(
          Objects.requireNonNull(incrementalIndexSchema, "incrementalIndexSchema is null"),
          deserializeComplexMetrics,
          reportParseExceptions,
          concurrentEventAdd,
          sortFacts,
          maxRowCount,
          Objects.requireNonNull(bufferPool, "bufferPool is null")
      );
    }
  }

  public boolean isRollup()
  {
    return rollup;
  }

  public abstract FactsHolder getFacts();

  public abstract boolean canAppendRow();

  public abstract String getOutOfRowsReason();

  // Subclass hook: allocates one aggregator slot per metric.
  protected abstract AggregatorType[] initAggs(
      AggregatorFactory[] metrics,
      Supplier<InputRow> rowSupplier,
      boolean deserializeComplexMetrics,
      boolean concurrentEventAdd
  );

  // Note: This method needs to be thread safe.
  protected abstract Integer addToFacts(
      AggregatorFactory[] metrics,
      boolean deserializeComplexMetrics,
      boolean reportParseExceptions,
      InputRow row,
      AtomicInteger numEntries,
      TimeAndDims key,
      ThreadLocal<InputRow> rowContainer,
      Supplier<InputRow> rowSupplier
  ) throws IndexSizeExceededException;

  public abstract int getLastRowIndex();

  protected abstract AggregatorType[] getAggsForRow(int rowOffset);

  protected abstract Object getAggVal(AggregatorType agg, int rowOffset, int aggPosition);

  protected abstract float getMetricFloatValue(int rowOffset, int aggOffset);

  protected abstract long getMetricLongValue(int rowOffset, int aggOffset);

  protected abstract Object getMetricObjectValue(int rowOffset, int aggOffset);

  protected abstract double getMetricDoubleValue(int rowOffset, int aggOffset);

  // Base implementation holds no closeable resources; subclasses may override.
  @Override
  public void close()
  {
  }

  /**
   * Applies every registered row transformer (e.g. the spatial-dimension transformer)
   * to {@code row} in order and returns the result; the result must be non-null.
   */
  public InputRow formatRow(InputRow row)
  {
    for (Function<InputRow, InputRow> rowTransformer : rowTransformers) {
      row = rowTransformer.apply(row);
    }

    if (row == null) {
      throw new IAE("Row is null? How can this be?!");
    }
    return row;
  }

  /**
   * Discovers the {@link ValueType} of a dimension value from its Java class; for a
   * list, the first element's class is used. Returns null for null/empty input or an
   * unmapped class.
   */
  private ValueType getTypeFromDimVal(Object dimVal)
  {
    Object singleVal;
    if (dimVal instanceof List) {
      List dimValList = (List) dimVal;
      singleVal = dimValList.size() == 0 ? null : dimValList.get(0);
    } else {
      singleVal = dimVal;
    }

    if (singleVal == null) {
      return null;
    }

    return TYPE_MAP.get(singleVal.getClass());
  }

  public Map<String, ColumnCapabilitiesImpl> getColumnCapabilities()
  {
    return columnCapabilities;
  }

  /**
   * Adds a new row. The row might correspond with another row that already exists, in which case this will
   * update that row instead of inserting a new one.
   * <p>
   * <p>
   * Calls to add() are thread safe.
* <p>
   *
   * @param row the row of data to add
   *
   * @return the number of rows in the data set after adding the InputRow
   */
  public int add(InputRow row) throws IndexSizeExceededException
  {
    TimeAndDims key = toTimeAndDims(row);
    final int rv = addToFacts(
        metrics,
        deserializeComplexMetrics,
        reportParseExceptions,
        row,
        numEntries,
        key,
        in,
        rowSupplier
    );
    updateMaxIngestedTime(row.getTimestamp());
    return rv;
  }

  /**
   * Converts an input row into the {@link TimeAndDims} fact key: transforms the row,
   * truncates its timestamp to the query granularity bucket, and encodes each dimension
   * value via its {@link DimensionIndexer}. New dimensions seen for the first time are
   * registered (under the {@code dimensionDescs} lock) and their values appended after
   * the already-known dimensions.
   */
  @VisibleForTesting
  TimeAndDims toTimeAndDims(InputRow row) throws IndexSizeExceededException
  {
    row = formatRow(row);
    if (row.getTimestampFromEpoch() < minTimestamp) {
      throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, new DateTime(minTimestamp));
    }

    final List<String> rowDimensions = row.getDimensions();

    Object[] dims;
    List<Object> overflow = null;
    synchronized (dimensionDescs) {
      dims = new Object[dimensionDescs.size()];
      for (String dimension : rowDimensions) {
        boolean wasNewDim = false;
        ColumnCapabilitiesImpl capabilities;
        DimensionDesc desc = dimensionDescs.get(dimension);
        if (desc != null) {
          capabilities = desc.getCapabilities();
        } else {
          wasNewDim = true;
          capabilities = columnCapabilities.get(dimension);
          if (capabilities == null) {
            capabilities = new ColumnCapabilitiesImpl();
            // For schemaless type discovery, assume everything is a String for now, can change later.
            capabilities.setType(ValueType.STRING);
            capabilities.setDictionaryEncoded(true);
            capabilities.setHasBitmapIndexes(true);
            columnCapabilities.put(dimension, capabilities);
          }
          DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dimension, capabilities, null);
          desc = addNewDimension(dimension, capabilities, handler);
        }
        DimensionHandler handler = desc.getHandler();
        DimensionIndexer indexer = desc.getIndexer();
        Object dimsKey = indexer.processRowValsToUnsortedEncodedKeyComponent(row.getRaw(dimension));

        // Set column capabilities as data is coming in
        if (!capabilities.hasMultipleValues()
            && dimsKey != null
            && handler.getLengthOfEncodedKeyComponent(dimsKey) > 1) {
          capabilities.setHasMultipleValues(true);
        }

        if (wasNewDim) {
          if (overflow == null) {
            overflow = Lists.newArrayList();
          }
          overflow.add(dimsKey);
        } else if (desc.getIndex() > dims.length || dims[desc.getIndex()] != null) {
          /*
           * index > dims.length requires that we saw this dimension and added it to the dimensionOrder map,
           * otherwise index is null. Since dims is initialized based on the size of dimensionOrder on each call to add,
           * it must have been added to dimensionOrder during this InputRow.
           *
           * if we found an index for this dimension it means we've seen it already. If !(index > dims.length) then
           * we saw it on a previous input row (thus it's safe to index into dims). If we found a value in
           * the dims array for this index, it means we have seen this dimension already on this input row.
           */
          throw new ISE("Dimension[%s] occurred more than once in InputRow", dimension);
        } else {
          dims[desc.getIndex()] = dimsKey;
        }
      }
    }

    if (overflow != null) {
      // Merge overflow and non-overflow
      Object[] newDims = new Object[dims.length + overflow.size()];
      System.arraycopy(dims, 0, newDims, 0, dims.length);
      for (int i = 0; i < overflow.size(); ++i) {
        newDims[dims.length + i] = overflow.get(i);
      }
      dims = newDims;
    }

    // Truncate the event timestamp to its granularity bucket, clamped at minTimestamp.
    long truncated = 0;
    if (row.getTimestamp() != null) {
      truncated = gran.bucketStart(row.getTimestamp()).getMillis();
    }
    return new TimeAndDims(Math.max(truncated, minTimestamp), dims, dimensionDescsList);
  }

  private synchronized void updateMaxIngestedTime(DateTime eventTime)
  {
    if (maxIngestedEventTime == null || maxIngestedEventTime.isBefore(eventTime)) {
      maxIngestedEventTime = eventTime;
    }
  }

  public boolean isEmpty()
  {
    return numEntries.get() == 0;
  }

  public int size()
  {
    return numEntries.get();
  }

  private long getMinTimeMillis()
  {
    return getFacts().getMinTimeMillis();
  }

  private long getMaxTimeMillis()
  {
    return getFacts().getMaxTimeMillis();
  }

  public AggregatorType[] getAggs()
  {
    return aggs;
  }

  public AggregatorFactory[] getMetricAggs()
  {
    return metrics;
  }

  public List<String> getDimensionNames()
  {
    synchronized (dimensionDescs) {
      return ImmutableList.copyOf(dimensionDescs.keySet());
    }
  }

  public List<DimensionDesc> getDimensions()
  {
    synchronized (dimensionDescs) {
      return ImmutableList.copyOf(dimensionDescs.values());
    }
  }

  public DimensionDesc getDimension(String dimension)
  {
    synchronized (dimensionDescs) {
      return dimensionDescs.get(dimension);
    }
  }

  /** Returns the type name of the given metric, or null if the metric is unknown. */
  public String getMetricType(String metric)
  {
    final MetricDesc metricDesc = metricDescs.get(metric);
    return metricDesc != null ?
metricDesc.getType() : null;
  }

  /** Maps a metric's ValueType to its boxed Java class; complex types ask their serde. */
  public Class getMetricClass(String metric)
  {
    MetricDesc metricDesc = metricDescs.get(metric);
    switch (metricDesc.getCapabilities().getType()) {
      case COMPLEX:
        return ComplexMetrics.getSerdeForType(metricDesc.getType()).getObjectStrategy().getClazz();
      case DOUBLE:
        return Double.class;
      case FLOAT:
        return Float.class;
      case LONG:
        return Long.class;
      case STRING:
        return String.class;
    }
    return null;
  }

  public Interval getInterval()
  {
    // Empty index yields a zero-length interval at minTimestamp.
    return new Interval(
        minTimestamp,
        isEmpty() ? minTimestamp : gran.increment(new DateTime(getMaxTimeMillis())).getMillis()
    );
  }

  public DateTime getMinTime()
  {
    return isEmpty() ? null : new DateTime(getMinTimeMillis());
  }

  public DateTime getMaxTime()
  {
    return isEmpty() ? null : new DateTime(getMaxTimeMillis());
  }

  public Integer getDimensionIndex(String dimension)
  {
    DimensionDesc dimSpec = getDimension(dimension);
    return dimSpec == null ? null : dimSpec.getIndex();
  }

  public List<String> getDimensionOrder()
  {
    synchronized (dimensionDescs) {
      return ImmutableList.copyOf(dimensionDescs.keySet());
    }
  }

  // (sic) "Capabilites" — name kept for compatibility with existing callers.
  private ColumnCapabilitiesImpl makeCapabilitesFromValueType(ValueType type)
  {
    ColumnCapabilitiesImpl capabilities = new ColumnCapabilitiesImpl();
    // Only string columns are dictionary-encoded and bitmap-indexed.
    capabilities.setDictionaryEncoded(type == ValueType.STRING);
    capabilities.setHasBitmapIndexes(type == ValueType.STRING);
    capabilities.setType(type);
    return capabilities;
  }

  /**
   * Currently called to initialize IncrementalIndex dimension order during index creation
   * Index dimension ordering could be changed to initialize from DimensionsSpec after resolution of
   * https://github.com/druid-io/druid/issues/2011
   */
  public void loadDimensionIterable(
      Iterable<String> oldDimensionOrder,
      Map<String, ColumnCapabilitiesImpl> oldColumnCapabilities
  )
  {
    synchronized (dimensionDescs) {
      if (!dimensionDescs.isEmpty()) {
        throw new ISE("Cannot load dimension order when existing order[%s] is not empty.", dimensionDescs.keySet());
      }
      for (String dim : oldDimensionOrder) {
        if (dimensionDescs.get(dim) == null) {
          ColumnCapabilitiesImpl capabilities = oldColumnCapabilities.get(dim);
          columnCapabilities.put(dim, capabilities);
          DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dim, capabilities, null);
          addNewDimension(dim, capabilities, handler);
        }
      }
    }
  }

  // Registers a dimension at the next index; caller must hold the dimensionDescs lock.
  @GuardedBy("dimensionDescs")
  private DimensionDesc addNewDimension(String dim, ColumnCapabilitiesImpl capabilities, DimensionHandler handler)
  {
    DimensionDesc desc = new DimensionDesc(dimensionDescs.size(), dim, capabilities, handler);
    dimensionDescs.put(dim, desc);
    dimensionDescsList.add(desc);
    return desc;
  }

  public List<String> getMetricNames()
  {
    return ImmutableList.copyOf(metricDescs.keySet());
  }

  public List<MetricDesc> getMetrics()
  {
    return ImmutableList.copyOf(metricDescs.values());
  }

  public Integer getMetricIndex(String metricName)
  {
    MetricDesc metSpec = metricDescs.get(metricName);
    return metSpec == null ? null : metSpec.getIndex();
  }

  public ColumnCapabilities getCapabilities(String column)
  {
    return columnCapabilities.get(column);
  }

  public Metadata getMetadata()
  {
    return metadata;
  }

  private static AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators)
  {
    AggregatorFactory[] combiningAggregators = new AggregatorFactory[aggregators.length];
    for (int i = 0; i < aggregators.length; i++) {
      combiningAggregators[i] = aggregators[i].getCombiningFactory();
    }
    return combiningAggregators;
  }

  public Map<String, DimensionHandler> getDimensionHandlers()
  {
    Map<String, DimensionHandler> handlers = Maps.newLinkedHashMap();
    for (DimensionDesc desc : dimensionDescsList) {
      handlers.put(desc.getName(), desc.getHandler());
    }
    return handlers;
  }

  @Override
  public Iterator<Row> iterator()
  {
    return iterableWithPostAggregations(null, false).iterator();
  }

  /**
   * Iterates over facts as {@link MapBasedRow}s, decoding dimension keys and evaluating
   * the given post-aggregators (if any) per row.
   */
  public Iterable<Row> iterableWithPostAggregations(final List<PostAggregator> postAggs, final boolean descending)
  {
    return new Iterable<Row>()
    {
      @Override
      public Iterator<Row> iterator()
      {
        final List<DimensionDesc> dimensions = getDimensions();
return Iterators.transform(
            getFacts().iterator(descending),
            timeAndDims -> {
              final int rowOffset = timeAndDims.getRowIndex();

              Object[] theDims = timeAndDims.getDims();

              Map<String, Object> theVals = Maps.newLinkedHashMap();
              for (int i = 0; i < theDims.length; ++i) {
                Object dim = theDims[i];
                DimensionDesc dimensionDesc = dimensions.get(i);
                if (dimensionDesc == null) {
                  continue;
                }
                String dimensionName = dimensionDesc.getName();
                DimensionHandler handler = dimensionDesc.getHandler();
                if (dim == null || handler.getLengthOfEncodedKeyComponent(dim) == 0) {
                  theVals.put(dimensionName, null);
                  continue;
                }
                // Decode the dimension key back to its actual value(s).
                final DimensionIndexer indexer = dimensionDesc.getIndexer();
                Object rowVals = indexer.convertUnsortedEncodedKeyComponentToActualArrayOrList(dim, DimensionIndexer.LIST);
                theVals.put(dimensionName, rowVals);
              }

              AggregatorType[] aggs = getAggsForRow(rowOffset);
              for (int i = 0; i < aggs.length; ++i) {
                theVals.put(metrics[i].getName(), getAggVal(aggs[i], rowOffset, i));
              }

              if (postAggs != null) {
                for (PostAggregator postAgg : postAggs) {
                  theVals.put(postAgg.getName(), postAgg.compute(theVals));
                }
              }

              return new MapBasedRow(timeAndDims.getTimestamp(), theVals);
            }
        );
      }
    };
  }

  public DateTime getMaxIngestedEventTime()
  {
    return maxIngestedEventTime;
  }

  /** Immutable descriptor of one dimension: its position, name, capabilities, handler and indexer. */
  public static final class DimensionDesc
  {
    private final int index;
    private final String name;
    private final ColumnCapabilitiesImpl capabilities;
    private final DimensionHandler handler;
    private final DimensionIndexer indexer;

    public DimensionDesc(int index, String name, ColumnCapabilitiesImpl capabilities, DimensionHandler handler)
    {
      this.index = index;
      this.name = name;
      this.capabilities = capabilities;
      this.handler = handler;
      this.indexer = handler.makeIndexer();
    }

    public int getIndex()
    {
      return index;
    }

    public String getName()
    {
      return name;
    }

    public ColumnCapabilitiesImpl getCapabilities()
    {
      return capabilities;
    }

    public DimensionHandler getHandler()
    {
      return handler;
    }

    public DimensionIndexer getIndexer()
    {
      return indexer;
    }
  }

  /** Descriptor of one metric column; derives ValueType and type name from the factory. */
  public static final class MetricDesc
  {
    private final int index;
    private final String name;
    private final String type;
    private final ColumnCapabilitiesImpl capabilities;

    public MetricDesc(int index, AggregatorFactory factory)
    {
      this.index = index;
      this.name = factory.getName();

      String typeInfo = factory.getTypeName();
      this.capabilities = new ColumnCapabilitiesImpl();
      if (typeInfo.equalsIgnoreCase("float")) {
        capabilities.setType(ValueType.FLOAT);
        this.type = typeInfo;
      } else if (typeInfo.equalsIgnoreCase("long")) {
        capabilities.setType(ValueType.LONG);
        this.type = typeInfo;
      } else if (typeInfo.equalsIgnoreCase("double")) {
        capabilities.setType(ValueType.DOUBLE);
        this.type = typeInfo;
      } else {
        // Anything else is treated as a complex type; the serde supplies the canonical name.
        capabilities.setType(ValueType.COMPLEX);
        this.type = ComplexMetrics.getSerdeForType(typeInfo).getTypeName();
      }
    }

    public int getIndex()
    {
      return index;
    }

    public String getName()
    {
      return name;
    }

    public String getType()
    {
      return type;
    }

    public ColumnCapabilitiesImpl getCapabilities()
    {
      return capabilities;
    }
  }

  /** Fact-table key: bucketed timestamp plus the encoded dimension values. */
  public static final class TimeAndDims
  {
    public static final int EMPTY_ROW_INDEX = -1;

    private final long timestamp;
    private final Object[] dims;
    private final List<DimensionDesc> dimensionDescsList;

    /**
     * rowIndex is not checked in {@link #equals} and {@link #hashCode} on purpose. TimeAndDims acts as a Map key
     * and "entry" object (rowIndex is the "value") at the same time. This is done to reduce object indirection and
     * improve locality, and avoid boxing of rowIndex as Integer, when stored in JDK collection:
     * {@link RollupFactsHolder} needs concurrent collections, that are not present in fastutil.
     */
    private int rowIndex;

    TimeAndDims(
        long timestamp,
        Object[] dims,
        List<DimensionDesc> dimensionDescsList
    )
    {
      this(timestamp, dims, dimensionDescsList, EMPTY_ROW_INDEX);
    }

    TimeAndDims(
        long timestamp,
        Object[] dims,
        List<DimensionDesc> dimensionDescsList,
        int rowIndex
    )
    {
      this.timestamp = timestamp;
      this.dims = dims;
      this.dimensionDescsList = dimensionDescsList;
      this.rowIndex = rowIndex;
    }

    public long getTimestamp()
    {
      return timestamp;
    }

    public Object[] getDims()
    {
      return dims;
    }

    public int getRowIndex()
    {
      return rowIndex;
    }

    private void setRowIndex(int rowIndex)
    {
      this.rowIndex = rowIndex;
    }

    @Override
    public String toString()
    {
      return "TimeAndDims{" +
             "timestamp=" + new DateTime(timestamp) +
             ", dims=" + Lists.transform(
          Arrays.asList(dims),
          new Function<Object, Object>()
          {
            @Override
            public Object apply(@Nullable Object input)
            {
              if (input == null || Array.getLength(input) == 0) {
                return Collections.singletonList("null");
              }
              return Collections.singletonList(input);
            }
          }
      ) + '}';
    }

    // Equality delegates per-dimension comparison to each dimension's indexer;
    // rowIndex is deliberately excluded (see field javadoc).
    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      TimeAndDims that = (TimeAndDims) o;

      if (timestamp != that.timestamp) {
        return false;
      }
      if (dims.length != that.dims.length) {
        return false;
      }
      for (int i = 0; i < dims.length; i++) {
        final DimensionIndexer indexer = dimensionDescsList.get(i).getIndexer();
        if (!indexer.checkUnsortedEncodedKeyComponentsEqual(dims[i], that.dims[i])) {
          return false;
        }
      }
      return true;
    }

    @Override
    public int hashCode()
    {
      int hash = (int) timestamp;
      for (int i = 0; i < dims.length; i++) {
        final DimensionIndexer indexer = dimensionDescsList.get(i).getIndexer();
        hash = 31 * hash + indexer.getUnsortedEncodedKeyComponentHashCode(dims[i]);
      }
      return hash;
    }
  }

  // Convenience overload using this index's own virtual columns.
  protected ColumnSelectorFactory makeColumnSelectorFactory(
      final AggregatorFactory agg,
      final Supplier<InputRow> in,
      final boolean deserializeComplexMetrics
  )
  {
    return makeColumnSelectorFactory(virtualColumns, agg, in,
deserializeComplexMetrics); } protected final Comparator<TimeAndDims> dimsComparator() { return new TimeAndDimsComp(dimensionDescsList); } @VisibleForTesting static final class TimeAndDimsComp implements Comparator<TimeAndDims> { private List<DimensionDesc> dimensionDescs; public TimeAndDimsComp(List<DimensionDesc> dimDescs) { this.dimensionDescs = dimDescs; } @Override public int compare(TimeAndDims lhs, TimeAndDims rhs) { int retVal = Longs.compare(lhs.timestamp, rhs.timestamp); int numComparisons = Math.min(lhs.dims.length, rhs.dims.length); int index = 0; while (retVal == 0 && index < numComparisons) { final Object lhsIdxs = lhs.dims[index]; final Object rhsIdxs = rhs.dims[index]; if (lhsIdxs == null) { if (rhsIdxs == null) { ++index; continue; } return -1; } if (rhsIdxs == null) { return 1; } final DimensionIndexer indexer = dimensionDescs.get(index).getIndexer(); retVal = indexer.compareUnsortedEncodedKeyComponents(lhsIdxs, rhsIdxs); ++index; } if (retVal == 0) { int lengthDiff = Ints.compare(lhs.dims.length, rhs.dims.length); if (lengthDiff == 0) { return 0; } Object[] largerDims = lengthDiff > 0 ? lhs.dims : rhs.dims; return allNull(largerDims, numComparisons) ? 0 : lengthDiff; } return retVal; } } private static boolean allNull(Object[] dims, int startPosition) { for (int i = startPosition; i < dims.length; i++) { if (dims[i] != null) { return false; } } return true; } interface FactsHolder { /** * @return the previous rowIndex associated with the specified key, or * {@code TimeAndDims#EMPTY_ROW_INDEX} if there was no mapping for the key. */ int getPriorIndex(TimeAndDims key); long getMinTimeMillis(); long getMaxTimeMillis(); Iterator<TimeAndDims> iterator(boolean descending); Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd); Iterable<TimeAndDims> keySet(); /** * @return the previous rowIndex associated with the specified key, or * {@code TimeAndDims#EMPTY_ROW_INDEX} if there was no mapping for the key. 
*/ int putIfAbsent(TimeAndDims key, int rowIndex); void clear(); } static class RollupFactsHolder implements FactsHolder { private final boolean sortFacts; // Can't use Set because we need to be able to get from collection private final ConcurrentMap<TimeAndDims, TimeAndDims> facts; private final List<DimensionDesc> dimensionDescsList; public RollupFactsHolder(boolean sortFacts, Comparator<TimeAndDims> timeAndDimsComparator, List<DimensionDesc> dimensionDescsList) { this.sortFacts = sortFacts; if (sortFacts) { this.facts = new ConcurrentSkipListMap<>(timeAndDimsComparator); } else { this.facts = new ConcurrentHashMap<>(); } this.dimensionDescsList = dimensionDescsList; } @Override public int getPriorIndex(TimeAndDims key) { TimeAndDims timeAndDims = facts.get(key); return timeAndDims == null ? TimeAndDims.EMPTY_ROW_INDEX : timeAndDims.rowIndex; } @Override public long getMinTimeMillis() { if (sortFacts) { return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).firstKey().getTimestamp(); } else { throw new UnsupportedOperationException("can't get minTime from unsorted facts data."); } } @Override public long getMaxTimeMillis() { if (sortFacts) { return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).lastKey().getTimestamp(); } else { throw new UnsupportedOperationException("can't get maxTime from unsorted facts data."); } } @Override public Iterator<TimeAndDims> iterator(boolean descending) { if (descending && sortFacts) { return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).descendingMap().keySet().iterator(); } return keySet().iterator(); } @Override public Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd) { if (!sortFacts) { throw new UnsupportedOperationException("can't get timeRange from unsorted facts data."); } TimeAndDims start = new TimeAndDims(timeStart, new Object[]{}, dimensionDescsList); TimeAndDims end = new TimeAndDims(timeEnd, new Object[]{}, dimensionDescsList); 
ConcurrentNavigableMap<TimeAndDims, TimeAndDims> subMap = ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).subMap(start, end); final Map<TimeAndDims, TimeAndDims> rangeMap = descending ? subMap.descendingMap() : subMap; return rangeMap.keySet(); } @Override public Iterable<TimeAndDims> keySet() { return facts.keySet(); } @Override public int putIfAbsent(TimeAndDims key, int rowIndex) { // setRowIndex() must be called before facts.putIfAbsent() for visibility of rowIndex from concurrent readers. key.setRowIndex(rowIndex); TimeAndDims prev = facts.putIfAbsent(key, key); return prev == null ? TimeAndDims.EMPTY_ROW_INDEX : prev.rowIndex; } @Override public void clear() { facts.clear(); } } static class PlainFactsHolder implements FactsHolder { private final boolean sortFacts; private final ConcurrentMap<Long, Deque<TimeAndDims>> facts; public PlainFactsHolder(boolean sortFacts) { this.sortFacts = sortFacts; if (sortFacts) { this.facts = new ConcurrentSkipListMap<>(); } else { this.facts = new ConcurrentHashMap<>(); } } @Override public int getPriorIndex(TimeAndDims key) { // always return EMPTY_ROW_INDEX to indicate that no prior key cause we always add new row return TimeAndDims.EMPTY_ROW_INDEX; } @Override public long getMinTimeMillis() { if (sortFacts) { return ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).firstKey(); } else { throw new UnsupportedOperationException("can't get minTime from unsorted facts data."); } } @Override public long getMaxTimeMillis() { if (sortFacts) { return ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).lastKey(); } else { throw new UnsupportedOperationException("can't get maxTime from unsorted facts data."); } } @Override public Iterator<TimeAndDims> iterator(boolean descending) { if (descending && sortFacts) { return concat(((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts) .descendingMap().values(), true).iterator(); } return concat(facts.values(), false).iterator(); } @Override public 
Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd) { ConcurrentNavigableMap<Long, Deque<TimeAndDims>> subMap = ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).subMap(timeStart, timeEnd); final Map<Long, Deque<TimeAndDims>> rangeMap = descending ? subMap.descendingMap() : subMap; return concat(rangeMap.values(), descending); } private Iterable<TimeAndDims> concat( final Iterable<Deque<TimeAndDims>> iterable, final boolean descending ) { return () -> Iterators.concat( Iterators.transform( iterable.iterator(), input -> descending ? input.descendingIterator() : input.iterator() ) ); } @Override public Iterable<TimeAndDims> keySet() { return concat(facts.values(), false); } @Override public int putIfAbsent(TimeAndDims key, int rowIndex) { Long time = key.getTimestamp(); Deque<TimeAndDims> rows = facts.get(time); if (rows == null) { facts.putIfAbsent(time, new ConcurrentLinkedDeque<>()); // in race condition, rows may be put by other thread, so always get latest status from facts rows = facts.get(time); } // setRowIndex() must be called before rows.add() for visibility of rowIndex from concurrent readers. key.setRowIndex(rowIndex); rows.add(key); // always return EMPTY_ROW_INDEX to indicate that we always add new row return TimeAndDims.EMPTY_ROW_INDEX; } @Override public void clear() { facts.clear(); } } }
package org.jetbrains.plugins.ipnb.format; import com.google.common.collect.Lists; import com.google.gson.*; import com.google.gson.annotations.SerializedName; import com.google.gson.internal.LinkedTreeMap; import com.google.gson.reflect.TypeToken; import com.google.gson.stream.JsonWriter; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.text.VersionComparatorUtil; import com.jetbrains.python.packaging.PyPackage; import com.jetbrains.python.packaging.PyPackageUtil; import com.jetbrains.python.sdk.PythonSdkType; import org.apache.commons.lang.math.NumberUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.ipnb.editor.panels.IpnbEditablePanel; import org.jetbrains.plugins.ipnb.editor.panels.IpnbFilePanel; import org.jetbrains.plugins.ipnb.format.cells.*; import org.jetbrains.plugins.ipnb.format.cells.output.*; import java.io.*; import java.lang.reflect.Type; import java.nio.charset.Charset; import java.util.*; public class IpnbParser { private static final Logger LOG = Logger.getInstance(IpnbParser.class); private static final Gson gson = initGson(); private static final List<String> myErrors = new ArrayList<>(); private static final String VALIDATION_ERROR_TEXT = "An invalid notebook may not function properly. 
The validation error was:"; private static final String VALIDATION_ERROR_TITLE = "Notebook Validation Failed"; @NotNull private static Gson initGson() { final GsonBuilder builder = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping() .registerTypeAdapter(IpnbCellRaw.class, new RawCellAdapter()) .registerTypeAdapter(IpnbFileRaw.class, new FileAdapter()) .registerTypeAdapter(CellOutputRaw.class, new OutputsAdapter()) .registerTypeAdapter(OutputDataRaw.class, new OutputDataAdapter()) .registerTypeAdapter(CellOutputRaw.class, new CellOutputDeserializer()) .registerTypeAdapter(OutputDataRaw.class, new OutputDataDeserializer()) .registerTypeAdapter(IpnbCellRaw.class, new CellRawDeserializer()).serializeNulls(); return builder.create(); } @NotNull public static IpnbFile parseIpnbFile(@NotNull final CharSequence fileText, @NotNull final VirtualFile virtualFile) { myErrors.clear(); final String path = virtualFile.getPath(); IpnbFileRaw rawFile = gson.fromJson(fileText.toString(), IpnbFileRaw.class); if (rawFile == null) { int nbformat = isIpythonNewFormat(virtualFile) ? 
4 : 3; return new IpnbFile(new HashMap<>(), nbformat, 0, Lists.newArrayList(), path); } List<IpnbCell> cells = new ArrayList<>(); final List<IpnbWorksheet> worksheets = rawFile.worksheets; if (worksheets == null) { for (IpnbCellRaw rawCell : rawFile.cells) { cells.add(rawCell.createCell(validateSource(rawCell))); } } else { for (IpnbWorksheet worksheet : worksheets) { final List<IpnbCellRaw> rawCells = worksheet.cells; for (IpnbCellRaw rawCell : rawCells) { cells.add(rawCell.createCell(validateSource(rawCell))); } } } showValidationMessage(); return new IpnbFile(rawFile.metadata, rawFile.nbformat, rawFile.nbformat_minor, cells, path); } private static boolean validateSource(IpnbCellRaw cell) { if (cell.source == null && cell.input == null) { final String error = VALIDATION_ERROR_TEXT + "\n" + "\"source\" or \"input\" is required property:\n" + cell; myErrors.add(error); LOG.warn(error); return false; } return true; } private static void showValidationMessage() { if (!myErrors.isEmpty()) { Messages.showWarningDialog(myErrors.get(0), VALIDATION_ERROR_TITLE); } } public static boolean isIpythonNewFormat(@NotNull final VirtualFile virtualFile) { final Project project = ProjectUtil.guessProjectForFile(virtualFile); if (project != null) { final Module module = ProjectRootManager.getInstance(project).getFileIndex().getModuleForFile(virtualFile); if (module != null) { final Sdk sdk = PythonSdkType.findPythonSdk(module); if (sdk != null) { // It should be called first before IpnbConnectionManager#startIpythonServer() final List<PyPackage> packages = PyPackageUtil.refreshAndGetPackagesModally(sdk); final PyPackage ipython = packages != null ? PyPackageUtil.findPackage(packages, "ipython") : null; final PyPackage jupyter = packages != null ? 
PyPackageUtil.findPackage(packages, "jupyter") : null; if (jupyter == null && ipython != null && VersionComparatorUtil.compare(ipython.getVersion(), "3.0") <= 0) { return false; } } } } return true; } @NotNull public static IpnbFile parseIpnbFile(@NotNull Document document, @NotNull final VirtualFile virtualFile) throws IOException { return parseIpnbFile(document.getImmutableCharSequence(), virtualFile); } public static void saveIpnbFile(@NotNull final IpnbFilePanel ipnbPanel) { final String json = newDocumentText(ipnbPanel); if (json == null) return; writeToFile(ipnbPanel.getIpnbFile().getPath(), json); } @Nullable public static String newDocumentText(@NotNull final IpnbFilePanel ipnbPanel) { final IpnbFile ipnbFile = ipnbPanel.getIpnbFile(); if (ipnbFile == null) return null; for (IpnbEditablePanel panel : ipnbPanel.getIpnbPanels()) { if (panel.isModified()) { panel.updateCellSource(); } } final IpnbFileRaw fileRaw = new IpnbFileRaw(); fileRaw.nbformat_minor = ipnbFile.getNbFormatMinor(); fileRaw.metadata = ipnbFile.getMetadata(); if (ipnbFile.getNbformat() == 4) { for (IpnbCell cell : ipnbFile.getCells()) { fileRaw.cells.add(IpnbCellRaw.fromCell(cell, ipnbFile.getNbformat())); } } else { final IpnbWorksheet worksheet = new IpnbWorksheet(); worksheet.cells.clear(); for (IpnbCell cell : ipnbFile.getCells()) { worksheet.cells.add(IpnbCellRaw.fromCell(cell, ipnbFile.getNbformat())); } fileRaw.worksheets = Collections.singletonList(worksheet); } final StringWriter stringWriter = new StringWriter(); final JsonWriter writer = new JsonWriter(stringWriter); writer.setIndent(" "); gson.toJson(fileRaw, fileRaw.getClass(), writer); return stringWriter.toString() +"\n"; } private static void writeToFile(@NotNull final String path, @NotNull final String json) { final File file = new File(path); try { final FileOutputStream fileOutputStream = new FileOutputStream(file); final OutputStreamWriter writer = new OutputStreamWriter(fileOutputStream, 
Charset.forName("UTF-8").newEncoder()); try { writer.write(json); } catch (IOException e) { LOG.error(e); } finally { try { writer.close(); fileOutputStream.close(); } catch (IOException e) { LOG.error(e); } } } catch (FileNotFoundException e) { LOG.error(e); } } @SuppressWarnings("unused") public static class IpnbFileRaw { List<IpnbWorksheet> worksheets; List<IpnbCellRaw> cells = new ArrayList<>(); Map<String, Object> metadata = new HashMap<>(); int nbformat = 4; int nbformat_minor; } private static class IpnbWorksheet { final List<IpnbCellRaw> cells = new ArrayList<>(); } @SuppressWarnings("unused") private static class IpnbCellRaw { String cell_type; Integer execution_count; Map<String, Object> metadata = new HashMap<>(); Integer level; List<CellOutputRaw> outputs; List<String> source; List<String> input; String language; Integer prompt_number; @Override public String toString() { return new GsonBuilder().setPrettyPrinting().create().toJson(this); } public static IpnbCellRaw fromCell(@NotNull final IpnbCell cell, int nbformat) { final IpnbCellRaw raw = new IpnbCellRaw(); if (cell instanceof IpnbEditableCell) { raw.metadata = ((IpnbEditableCell)cell).getMetadata(); } if (cell instanceof IpnbMarkdownCell) { raw.cell_type = "markdown"; raw.source = ((IpnbMarkdownCell)cell).getSource(); } else if (cell instanceof IpnbCodeCell) { raw.cell_type = "code"; final ArrayList<CellOutputRaw> outputRaws = new ArrayList<>(); for (IpnbOutputCell outputCell : ((IpnbCodeCell)cell).getCellOutputs()) { outputRaws.add(CellOutputRaw.fromOutput(outputCell, nbformat)); } raw.outputs = outputRaws; final Integer promptNumber = ((IpnbCodeCell)cell).getPromptNumber(); if (nbformat == 4) { raw.execution_count = promptNumber != null && promptNumber >= 0 ? promptNumber : null; raw.source = ((IpnbCodeCell)cell).getSource(); } else { raw.prompt_number = promptNumber != null && promptNumber >= 0 ? 
promptNumber : null; raw.language = ((IpnbCodeCell)cell).getLanguage(); raw.input = ((IpnbCodeCell)cell).getSource(); } } else if (cell instanceof IpnbRawCell) { raw.cell_type = "raw"; raw.source = ((IpnbRawCell)cell).getSource(); } else if (cell instanceof IpnbHeadingCell) { raw.cell_type = "heading"; raw.source = ((IpnbHeadingCell)cell).getSource(); raw.level = ((IpnbHeadingCell)cell).getLevel(); } return raw; } @Nullable public IpnbCell createCell(boolean isValidSource) { final IpnbCell cell; if (cell_type.equals("markdown")) { cell = new IpnbMarkdownCell(isValidSource ? source : new ArrayList<>(), metadata); } else if (cell_type.equals("code")) { final List<IpnbOutputCell> outputCells = new ArrayList<>(); for (CellOutputRaw outputRaw : outputs) { outputCells.add(outputRaw.createOutput()); } final Integer prompt = prompt_number != null ? prompt_number : execution_count; cell = new IpnbCodeCell(language == null ? "python" : language, input == null ? (isValidSource ? source : new ArrayList<>()) : input, prompt, outputCells, metadata); } else if (cell_type.equals("raw")) { cell = new IpnbRawCell(isValidSource ? source : new ArrayList<>()); } else if (cell_type.equals("heading")) { cell = new IpnbHeadingCell(isValidSource ? 
source : new ArrayList<>(), level, metadata); } else { cell = null; } return cell; } } private static class CellOutputRaw { String ename; String name; String evalue; OutputDataRaw data; Integer execution_count; String png; String stream; String jpeg; List<String> html; List<String> latex; List<String> svg; Integer prompt_number; String output_type; List<String> traceback; Map<String, Object> metadata; List<String> text; public static CellOutputRaw fromOutput(@NotNull final IpnbOutputCell outputCell, int nbformat) { final CellOutputRaw raw = new CellOutputRaw(); raw.metadata = outputCell.getMetadata(); if (raw.metadata == null && !(outputCell instanceof IpnbStreamOutputCell) && !(outputCell instanceof IpnbErrorOutputCell)) { raw.metadata = new HashMap<>(); } if (outputCell instanceof IpnbPngOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.png = ((IpnbPngOutputCell)outputCell).getBase64String(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? "execute_result" : "display_data"; } else { raw.png = ((IpnbPngOutputCell)outputCell).getBase64String(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbSvgOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); dataRaw.svg = ((IpnbSvgOutputCell)outputCell).getSvg(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? 
"execute_result" : "display_data"; } else { raw.svg = ((IpnbSvgOutputCell)outputCell).getSvg(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbJpegOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); dataRaw.jpeg = Lists.newArrayList(((IpnbJpegOutputCell)outputCell).getBase64String()); raw.data = dataRaw; } else { raw.jpeg = ((IpnbJpegOutputCell)outputCell).getBase64String(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbLatexOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); if (((IpnbLatexOutputCell)outputCell).isMarkdown()) { dataRaw.markdown = ((IpnbLatexOutputCell)outputCell).getLatex(); } else { dataRaw.latex = ((IpnbLatexOutputCell)outputCell).getLatex(); } raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? "execute_result" : "display_data"; } else { raw.latex = ((IpnbLatexOutputCell)outputCell).getLatex(); raw.text = outputCell.getText(); raw.prompt_number = outputCell.getPromptNumber(); } } else if (outputCell instanceof IpnbStreamOutputCell) { if (nbformat == 4) { raw.name = ((IpnbStreamOutputCell)outputCell).getStream(); } else { raw.stream = ((IpnbStreamOutputCell)outputCell).getStream(); } raw.text = outputCell.getText(); raw.output_type = "stream"; } else if (outputCell instanceof IpnbHtmlOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.html = ((IpnbHtmlOutputCell)outputCell).getHtmls(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); } else { raw.html = ((IpnbHtmlOutputCell)outputCell).getHtmls(); } raw.output_type = nbformat == 4 ? "execute_result" : "pyout"; } else if (outputCell instanceof IpnbErrorOutputCell) { raw.output_type = nbformat == 4 ? 
"error" : "pyerr"; raw.evalue = ((IpnbErrorOutputCell)outputCell).getEvalue(); raw.ename = ((IpnbErrorOutputCell)outputCell).getEname(); raw.traceback = outputCell.getText(); } else if (outputCell instanceof IpnbOutOutputCell) { if (nbformat == 4) { raw.execution_count = outputCell.getPromptNumber(); raw.output_type = "execute_result"; final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; } else { raw.output_type = "pyout"; raw.prompt_number = outputCell.getPromptNumber(); raw.text = outputCell.getText(); } } else { raw.text = outputCell.getText(); } return raw; } public IpnbOutputCell createOutput() { List<String> text = this.text != null ? this.text : data != null ? data.text : Lists.newArrayList(); Integer prompt = execution_count != null ? execution_count : prompt_number; final IpnbOutputCell outputCell; if (png != null || (data != null && data.png != null)) { outputCell = new IpnbPngOutputCell(png == null ? StringUtil.join(data.png) : png, text, prompt, metadata); } else if (jpeg != null || (data != null && data.jpeg != null)) { outputCell = new IpnbJpegOutputCell(jpeg == null ? StringUtil.join(data.jpeg, "") : jpeg, text, prompt, metadata); } else if (svg != null || (data != null && data.svg != null)) { outputCell = new IpnbSvgOutputCell(svg == null ? data.svg : svg, text, prompt, metadata); } else if (html != null || (data != null && data.html != null)) { outputCell = new IpnbHtmlOutputCell(html == null ? data.html : html, text, prompt, metadata); } else if (latex != null || (data != null && data.latex != null)) { outputCell = new IpnbLatexOutputCell(latex == null ? data.latex : latex, false, prompt, text, metadata); } else if (data != null && data.markdown != null) { outputCell = new IpnbLatexOutputCell(data.markdown, true, prompt, text, metadata); } else if (stream != null || name != null) { outputCell = new IpnbStreamOutputCell(stream == null ? 
name : stream, text, prompt, metadata); } else if ("pyerr".equals(output_type) || "error".equals(output_type)) { outputCell = new IpnbErrorOutputCell(evalue, ename, traceback, prompt, metadata); } else if ("pyout".equals(output_type)) { outputCell = new IpnbOutOutputCell(text, prompt, metadata); } else if ("execute_result".equals(output_type) && data != null) { outputCell = new IpnbOutOutputCell(data.text, prompt, metadata); } else if ("display_data".equals(output_type)) { outputCell = new IpnbPngOutputCell(null, text, prompt, metadata); } else { outputCell = new IpnbOutputCell(text, prompt, metadata); } return outputCell; } } private static class OutputDataRaw { @SerializedName("image/png") String png; @SerializedName("text/html") List<String> html; @SerializedName("image/svg+xml") List<String> svg; @SerializedName("image/jpeg") List<String> jpeg; @SerializedName("text/latex") List<String> latex; @SerializedName("text/plain") List<String> text; @SerializedName("text/markdown") List<String> markdown; } static class RawCellAdapter implements JsonSerializer<IpnbCellRaw> { @Override public JsonElement serialize(IpnbCellRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) { final JsonObject jsonObject = new JsonObject(); jsonObject.addProperty("cell_type", cellRaw.cell_type); if ("code".equals(cellRaw.cell_type)) { final Integer count = cellRaw.execution_count; if (count == null) { jsonObject.add("execution_count", JsonNull.INSTANCE); } else { jsonObject.addProperty("execution_count", count); } } if (cellRaw.metadata != null) { final JsonElement metadata = gson.toJsonTree(cellRaw.metadata); jsonObject.add("metadata", metadata); } if (cellRaw.level != null) { jsonObject.addProperty("level", cellRaw.level); } if (cellRaw.outputs != null) { final JsonElement outputs = gson.toJsonTree(cellRaw.outputs); jsonObject.add("outputs", outputs); } if (cellRaw.source != null) { final JsonElement source = gson.toJsonTree(cellRaw.source); jsonObject.add("source", source); } 
if (cellRaw.input != null) { final JsonElement input = gson.toJsonTree(cellRaw.input); jsonObject.add("input", input); } if (cellRaw.language != null) { jsonObject.addProperty("language", cellRaw.language); } if (cellRaw.prompt_number != null) { jsonObject.addProperty("prompt_number", cellRaw.prompt_number); } return jsonObject; } } static class FileAdapter implements JsonSerializer<IpnbFileRaw>, JsonDeserializer<IpnbFileRaw> { @Override public JsonElement serialize(IpnbFileRaw fileRaw, Type typeOfSrc, JsonSerializationContext context) { final JsonObject jsonObject = new JsonObject(); if (fileRaw.worksheets != null) { final JsonElement worksheets = gson.toJsonTree(fileRaw.worksheets); jsonObject.add("worksheets", worksheets); } if (fileRaw.cells != null) { final JsonElement cells = gson.toJsonTree(fileRaw.cells, new TypeToken<List<IpnbCellRaw>>(){}.getType()); jsonObject.add("cells", cells); } final JsonElement metadata = gson.toJsonTree(fileRaw.metadata); jsonObject.add("metadata", metadata); jsonObject.addProperty("nbformat", fileRaw.nbformat); jsonObject.addProperty("nbformat_minor", fileRaw.nbformat_minor); return jsonObject; } @Override public IpnbFileRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { JsonObject object = json.getAsJsonObject(); IpnbFileRaw fileRaw = new IpnbFileRaw(); JsonElement worksheets = object.get("worksheets"); if (worksheets != null) { fileRaw.worksheets = gson.fromJson(worksheets, new TypeToken<List<IpnbWorksheet>>(){}.getType()); } JsonElement cellsElement = object.get("cells"); if (cellsElement != null) { fileRaw.cells = gson.fromJson(cellsElement, new TypeToken<List<IpnbCellRaw>>(){}.getType()); } JsonElement metadataElement = object.get("metadata"); if (metadataElement != null) { LinkedTreeMap<String, Object> metadataMap = gson.fromJson(metadataElement, new TypeToken<Map<String, Object>>(){}.getType()); JsonElement kernelInfo = 
metadataElement.getAsJsonObject().get("kernel_info"); if (kernelInfo != null) { metadataMap.put("kernel_info", gson.fromJson(kernelInfo, new TypeToken<Map<String, String>>() {}.getType())); } JsonElement languageInfo = metadataElement.getAsJsonObject().get("language_info"); if (languageInfo != null) { LinkedTreeMap<String, Object> languageInfoMap = gson.fromJson(languageInfo, new TypeToken<Map<String, Object>>() {}.getType()); JsonElement codemirrorMode = languageInfo.getAsJsonObject().get("codemirror_mode"); if (codemirrorMode != null) { LinkedTreeMap<String, Object> codemirrorModeMap = gson.fromJson(codemirrorMode, new TypeToken<Map<String, String>>() {}.getType()); if (codemirrorModeMap.containsKey("version")) { String version = (String)codemirrorModeMap.get("version"); if (NumberUtils.isNumber(version)) { try { codemirrorModeMap.put("version", Integer.parseInt(version)); } catch (NumberFormatException e) { // added this to obtain backward compatibility as previously we parsed "version" as double. 
codemirrorModeMap.put("version", (int) Double.parseDouble(version)); } } } languageInfoMap.put("codemirror_mode", codemirrorModeMap); } metadataMap.put("language_info", languageInfoMap); } fileRaw.metadata = metadataMap; } JsonElement nbformat = object.get("nbformat"); if (nbformat != null) { fileRaw.nbformat = nbformat.getAsInt(); } JsonElement nbformatMinor = object.get("nbformat_minor"); if (nbformatMinor != null) { fileRaw.nbformat_minor = nbformatMinor.getAsInt(); } return fileRaw; } } static class CellRawDeserializer implements JsonDeserializer<IpnbCellRaw> { @Override public IpnbCellRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { final JsonObject object = json.getAsJsonObject(); final IpnbCellRaw cellRaw = new IpnbCellRaw(); final JsonElement cell_type = object.get("cell_type"); if (cell_type != null) { cellRaw.cell_type = cell_type.getAsString(); } final JsonElement count = object.get("execution_count"); if (count != null) { cellRaw.execution_count = count.isJsonNull() ? 
// Continuation of CellRawDeserializer.deserialize(): second arm of the
// "execution_count" ternary that is cut at the chunk boundary above.
null : count.getAsInt();
        }
        final JsonElement metadata = object.get("metadata");
        if (metadata != null) {
            cellRaw.metadata = gson.fromJson(metadata, Map.class);
        }
        final JsonElement level = object.get("level");
        if (level != null) {
            cellRaw.level = level.getAsInt();
        }
        // Outputs are deserialized one element at a time so the custom
        // CellOutputRaw deserializer is applied to each entry.
        final JsonElement outputsElement = object.get("outputs");
        if (outputsElement != null) {
            final JsonArray outputs = outputsElement.getAsJsonArray();
            cellRaw.outputs = Lists.newArrayList();
            for (JsonElement output : outputs) {
                cellRaw.outputs.add(gson.fromJson(output, CellOutputRaw.class));
            }
        }
        // Cell text may live under "source" or "input"; both are normalized to
        // a list of strings (or null when absent).
        cellRaw.source = getStringOrArray("source", object);
        cellRaw.input = getStringOrArray("input", object);
        final JsonElement language = object.get("language");
        if (language != null) {
            cellRaw.language = language.getAsString();
        }
        final JsonElement number = object.get("prompt_number");
        if (number != null) {
            // "*" marks a currently-executing cell: store "no number yet".
            if ("*".equals(number.getAsString())) {
                cellRaw.prompt_number = null;
            }
            else {
                cellRaw.prompt_number = number.getAsInt();
            }
        }
        return cellRaw;
    }
}

/**
 * Deserializes the MIME-keyed data bundle of a cell output into {@link OutputDataRaw}.
 */
static class OutputDataDeserializer implements JsonDeserializer<OutputDataRaw> {

    @Override
    public OutputDataRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        final JsonObject object = json.getAsJsonObject();
        final OutputDataRaw dataRaw = new OutputDataRaw();
        // "image/png" may be either a single string or an array of string
        // fragments; fragments are concatenated into one string.
        final JsonElement png = object.get("image/png");
        if (png instanceof JsonArray) {
            final JsonArray array = png.getAsJsonArray();
            StringBuilder pngString = new StringBuilder();
            for (int i = 0; i != array.size(); ++i) {
                pngString.append(array.get(i).getAsString());
            }
            dataRaw.png = pngString.toString();
        }
        else if (png instanceof JsonPrimitive) {
            dataRaw.png = png.getAsString();
        }
        dataRaw.html = getStringOrArray("text/html", object);
        dataRaw.svg = getStringOrArray("image/svg+xml", object);
        dataRaw.jpeg = getStringOrArray("image/jpeg", object);
        dataRaw.latex = getStringOrArray("text/latex", object);
        dataRaw.markdown = getStringOrArray("text/markdown", object);
        dataRaw.text = getStringOrArray("text/plain", object);
        return dataRaw;
    }
}

/**
 * Deserializes one cell output into {@link CellOutputRaw}. Accepts both the
 * flat field spelling ("png", "html", "text", ...) and the nested "data"
 * bundle form (delegated to OutputDataDeserializer via gson).
 */
static class CellOutputDeserializer implements JsonDeserializer<CellOutputRaw> {

    @Override
    public CellOutputRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        final JsonObject object = json.getAsJsonObject();
        final CellOutputRaw cellOutputRaw = new CellOutputRaw();
        // Error outputs: exception name ("ename") and value ("evalue").
        final JsonElement ename = object.get("ename");
        if (ename != null) {
            cellOutputRaw.ename = ename.getAsString();
        }
        final JsonElement name = object.get("name");
        if (name != null) {
            cellOutputRaw.name = name.getAsString();
        }
        final JsonElement evalue = object.get("evalue");
        if (evalue != null) {
            cellOutputRaw.evalue = evalue.getAsString();
        }
        final JsonElement data = object.get("data");
        if (data != null) {
            cellOutputRaw.data = gson.fromJson(data, OutputDataRaw.class);
        }
        // NOTE(review): unlike CellRawDeserializer, a JSON-null execution_count
        // here would throw from getAsInt() — presumably outputs never carry an
        // explicit null count; confirm against real notebooks.
        final JsonElement count = object.get("execution_count");
        if (count != null) {
            cellOutputRaw.execution_count = count.getAsInt();
        }
        final JsonElement outputType = object.get("output_type");
        if (outputType != null) {
            cellOutputRaw.output_type = outputType.getAsString();
        }
        final JsonElement png = object.get("png");
        if (png != null) {
            cellOutputRaw.png = png.getAsString();
        }
        final JsonElement stream = object.get("stream");
        if (stream != null) {
            cellOutputRaw.stream = stream.getAsString();
        }
        final JsonElement jpeg = object.get("jpeg");
        if (jpeg != null) {
            cellOutputRaw.jpeg = jpeg.getAsString();
        }
        cellOutputRaw.html = getStringOrArray("html", object);
        cellOutputRaw.latex = getStringOrArray("latex", object);
        cellOutputRaw.svg = getStringOrArray("svg", object);
        final JsonElement promptNumber = object.get("prompt_number");
        if (promptNumber != null) {
            cellOutputRaw.prompt_number = promptNumber.getAsInt();
        }
        cellOutputRaw.text = getStringOrArray("text", object);
        cellOutputRaw.traceback = getStringOrArray("traceback", object);
        final JsonElement metadata = object.get("metadata");
        if (metadata != null) {
// Continuation of CellOutputDeserializer.deserialize(): the "metadata" branch
// opened at the chunk boundary above.
cellOutputRaw.metadata = gson.fromJson(metadata, Map.class);
        }
        return cellOutputRaw;
    }
}

/**
 * Reads a JSON property that may be either a plain string or an array of
 * strings and normalizes it to a list.
 *
 * @param name   the property to read from {@code object}
 * @param object the surrounding JSON object
 * @return the value as a list of strings (single-element for a plain string),
 *         or {@code null} if the property is absent
 */
@Nullable
private static ArrayList<String> getStringOrArray(String name, JsonObject object) {
    final JsonElement jsonElement = object.get(name);
    final ArrayList<String> strings = Lists.newArrayList();
    if (jsonElement == null) return null;
    if (jsonElement.isJsonArray()) {
        final JsonArray array = jsonElement.getAsJsonArray();
        for (JsonElement element : array) {
            strings.add(element.getAsString());
        }
    }
    else {
        strings.add(jsonElement.getAsString());
    }
    return strings;
}

/**
 * Serializes {@link CellOutputRaw} back to JSON. Only non-null fields are
 * emitted, so round-tripped documents keep their original shape.
 */
static class OutputsAdapter implements JsonSerializer<CellOutputRaw> {

    @Override
    public JsonElement serialize(CellOutputRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) {
        final JsonObject jsonObject = new JsonObject();
        if (cellRaw.ename != null) {
            jsonObject.addProperty("ename", cellRaw.ename);
        }
        if (cellRaw.name != null) {
            jsonObject.addProperty("name", cellRaw.name);
        }
        if (cellRaw.evalue != null) {
            jsonObject.addProperty("evalue", cellRaw.evalue);
        }
        if (cellRaw.data != null) {
            final JsonElement data = gson.toJsonTree(cellRaw.data);
            jsonObject.add("data", data);
        }
        if (cellRaw.execution_count != null) {
            jsonObject.addProperty("execution_count", cellRaw.execution_count);
        }
        if (cellRaw.png != null) {
            jsonObject.addProperty("png", cellRaw.png);
        }
        if (cellRaw.stream != null) {
            jsonObject.addProperty("stream", cellRaw.stream);
        }
        if (cellRaw.jpeg != null) {
            jsonObject.addProperty("jpeg", cellRaw.jpeg);
        }
        if (cellRaw.html != null) {
            final JsonElement html = gson.toJsonTree(cellRaw.html);
            jsonObject.add("html", html);
        }
        if (cellRaw.latex != null) {
            final JsonElement latex = gson.toJsonTree(cellRaw.latex);
            jsonObject.add("latex", latex);
        }
        if (cellRaw.svg != null) {
            final JsonElement svg = gson.toJsonTree(cellRaw.svg);
            jsonObject.add("svg", svg);
        }
        if (cellRaw.prompt_number != null) {
            jsonObject.addProperty("prompt_number", cellRaw.prompt_number);
        }
        if (cellRaw.traceback != null) {
            final JsonElement traceback = gson.toJsonTree(cellRaw.traceback);
            jsonObject.add("traceback", traceback);
        }
        if (cellRaw.metadata != null) {
            final JsonElement metadata = gson.toJsonTree(cellRaw.metadata);
            jsonObject.add("metadata", metadata);
        }
        if (cellRaw.output_type != null) {
            jsonObject.addProperty("output_type", cellRaw.output_type);
        }
        if (cellRaw.text != null) {
            final JsonElement text = gson.toJsonTree(cellRaw.text);
            jsonObject.add("text", text);
        }
        return jsonObject;
    }
}

/**
 * Serializes {@link OutputDataRaw} back to its MIME-keyed JSON form, emitting
 * only non-null entries. Mirrors OutputDataDeserializer above.
 */
static class OutputDataAdapter implements JsonSerializer<OutputDataRaw> {

    @Override
    public JsonElement serialize(OutputDataRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) {
        final JsonObject jsonObject = new JsonObject();
        if (cellRaw.png != null) {
            jsonObject.addProperty("image/png", cellRaw.png);
        }
        if (cellRaw.html != null) {
            final JsonElement html = gson.toJsonTree(cellRaw.html);
            jsonObject.add("text/html", html);
        }
        if (cellRaw.svg != null) {
            final JsonElement svg = gson.toJsonTree(cellRaw.svg);
            jsonObject.add("image/svg+xml", svg);
        }
        if (cellRaw.jpeg != null) {
            final JsonElement jpeg = gson.toJsonTree(cellRaw.jpeg);
            jsonObject.add("image/jpeg", jpeg);
        }
        if (cellRaw.latex != null) {
            final JsonElement latex = gson.toJsonTree(cellRaw.latex);
            jsonObject.add("text/latex", latex);
        }
        if (cellRaw.markdown != null) {
            final JsonElement markdown = gson.toJsonTree(cellRaw.markdown);
            jsonObject.add("text/markdown", markdown);
        }
        if (cellRaw.text != null) {
            final JsonElement text = gson.toJsonTree(cellRaw.text);
            jsonObject.add("text/plain", text);
        }
        return jsonObject;
    }
}
// Closes the enclosing top-level class (opened before this chunk).
}
/*
 * Copyright 2013
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.openntf.domino.impl;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import lotus.domino.NotesException;

import org.openntf.domino.Database;
import org.openntf.domino.DateTime;
import org.openntf.domino.Session;
import org.openntf.domino.WrapperFactory;
import org.openntf.domino.iterators.NoteIterator;
import org.openntf.domino.utils.DominoUtils;

/**
 * The Class NoteCollection: an OpenNTF wrapper around a
 * {@code lotus.domino.NoteCollection} delegate. Nearly every method forwards to
 * the delegate and routes any {@link NotesException} through
 * {@code DominoUtils.handleException} instead of letting it propagate.
 */
/**
 * @author withersp
 *
 */
public class NoteCollection extends BaseThreadSafe<org.openntf.domino.NoteCollection, lotus.domino.NoteCollection, Database>
		implements org.openntf.domino.NoteCollection {
	//private static final Logger log_ = Logger.getLogger(NoteCollection.class.getName());

	/**
	 * Instantiates a new NoteCollection wrapper.
	 *
	 * @param delegate
	 *            the delegate
	 * @param parent
	 *            the parent
	 */
	protected NoteCollection(final lotus.domino.NoteCollection delegate, final Database parent) {
		super(delegate, parent, NOTES_NOTECOLLECTION);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(int)
	 */
	@Override
	public void add(final int additionSpecifier) {
		try {
			getDelegate().add(additionSpecifier);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(int[])
	 */
	@Override
	public void add(final int[] additionSpecifier) {
		try {
			getDelegate().add(additionSpecifier);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.Agent)
	 */
	@Override
	public void add(final lotus.domino.Agent additionSpecifier) {
		try {
			// Unwrap to the native lotus object before handing to the delegate.
			getDelegate().add(toLotus(additionSpecifier));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.Document)
	 */
	@Override
	public void add(final lotus.domino.Document additionSpecifier) {
		try {
			getDelegate().add(toLotus(additionSpecifier));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.DocumentCollection)
	 */
	@Override
	public void add(final lotus.domino.DocumentCollection additionSpecifier) {
		try {
			// TODO Figure out why the normal add() line with the DC throws a NotesException("Invalid object type for method argument")
			//if (additionSpecifier instanceof lotus.domino.DocumentCollection) {
			getDelegate().add(additionSpecifier); // TODO RPr: use toLotus?
			//			} else {
			//				if (log_.isLoggable(Level.WARNING)) {
			//					log_.log(
			//							Level.WARNING,
			//							"Attempting to add a native lotus.domino.DocumentCollection to an org.openntf.domino.NoteCollection. Because we cannot know the use of the DocumentCollection argument later, we cannot auto-recycle.
//							You really shouldn't mix your API types.");
			//				}
			//				lotus.domino.Document doc = additionSpecifier.getFirstDocument();
			//				while (doc != null) {
			//					getDelegate().add(toLotus(doc));
			//
			//					doc = additionSpecifier.getNextDocument(doc);
			//				}
			//			}
		} catch (NotesException e) {
			// e.printStackTrace();
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.Form)
	 */
	@Override
	public void add(final lotus.domino.Form additionSpecifier) {
		try {
			getDelegate().add(toLotus(additionSpecifier));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.NoteCollection)
	 */
	@Override
	public void add(final lotus.domino.NoteCollection additionSpecifier) {
		try {
			getDelegate().add(toLotus(additionSpecifier));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(lotus.domino.View)
	 */
	@Override
	public void add(final lotus.domino.View additionSpecifier) {
		try {
			getDelegate().add(toLotus(additionSpecifier));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#add(java.lang.String)
	 */
	@Override
	public void add(final String additionSpecifier) {
		try {
			getDelegate().add(additionSpecifier);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#buildCollection()
	 */
	@Override
	public void buildCollection() {
		try {
			// Defensive null check with a debug print; catches Throwable (not
			// just NotesException) so any delegate failure is routed the same way.
			lotus.domino.NoteCollection nc = getDelegate();
			if (nc == null) {
				System.out.println("Delegate is null???");
			} else {
				getDelegate().buildCollection();
			}
		} catch (Throwable t) {
			DominoUtils.handleException(t);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#clearCollection()
	 */
	@Override
	public void clearCollection() {
		try {
			getDelegate().clearCollection();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getCount()
	 */
	@Override
	public int getCount() {
		try {
			return getDelegate().getCount();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		// Falls through to 0 when the delegate call failed.
		return 0;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getFirstNoteID()
	 */
	@Override
	public String getFirstNoteID() {
		try {
			return getDelegate().getFirstNoteID();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getLastBuildTime()
	 */
	@Override
	public DateTime getLastBuildTime() {
		try {
			// Wrap the lotus DateTime into the org.openntf.domino DateTime.
			return fromLotus(getDelegate().getLastBuildTime(), DateTime.SCHEMA, getAncestorSession());
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getLastNoteID()
	 */
	@Override
	public String getLastNoteID() {
		try {
			return getDelegate().getLastNoteID();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getNextNoteID(java.lang.String)
	 */
	@Override
	public String getNextNoteID(final String noteId) {
		try {
			return getDelegate().getNextNoteID(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getNoteIDs()
	 */
	@Override
	public int[] getNoteIDs() {
		try {
			return getDelegate().getNoteIDs();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		// NOTE(review): returns null (not an empty array) on delegate failure.
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.impl.Base#getParent()
	 */
	@Override
	public final Database getParent() {
		return parent;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getPrevNoteID(java.lang.String)
	 */
	@Override
	public String getPrevNoteID(final String noteId) {
		try {
			return getDelegate().getPrevNoteID(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return null;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectAcl()
	 */
	@Override
	public boolean getSelectAcl() {
		// NOTE(review): here the false fallback sits after the try/catch, while the
		// sibling getSelect* getters return false inside the catch — same observable
		// behavior, just an inconsistent style in the original.
		try {
			return getDelegate().getSelectAcl();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
		return false;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectActions()
	 */
	@Override
	public boolean getSelectActions() {
		try {
			return getDelegate().getSelectActions();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectAgents()
	 */
	@Override
	public boolean getSelectAgents() {
		try {
			return getDelegate().getSelectAgents();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectDatabaseScript()
	 */
	@Override
	public boolean getSelectDatabaseScript() {
		try {
			return getDelegate().getSelectDatabaseScript();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectDataConnections()
	 */
	@Override
	public boolean getSelectDataConnections() {
		try {
			return getDelegate().getSelectDataConnections();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectDocuments()
	 */
	@Override
	public boolean getSelectDocuments() {
		try {
			return getDelegate().getSelectDocuments();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectFolders()
	 */
	@Override
	public boolean getSelectFolders() {
		try {
			return getDelegate().getSelectFolders();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectForms()
	 */
	@Override
	public boolean getSelectForms() {
		try {
			return getDelegate().getSelectForms();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectFramesets()
	 */
	@Override
	public boolean getSelectFramesets() {
		try {
			return getDelegate().getSelectFramesets();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectHelpAbout()
	 */
	@Override
	public boolean getSelectHelpAbout() {
		try {
			return getDelegate().getSelectHelpAbout();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectHelpIndex()
	 */
	@Override
	public boolean getSelectHelpIndex() {
		try {
			return getDelegate().getSelectHelpIndex();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectHelpUsing()
	 */
	@Override
	public boolean getSelectHelpUsing() {
		try {
			return getDelegate().getSelectHelpUsing();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectIcon()
	 */
	@Override
	public boolean getSelectIcon() {
		try {
			return getDelegate().getSelectIcon();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectImageResources()
	 */
	@Override
	public boolean getSelectImageResources() {
		try {
			return getDelegate().getSelectImageResources();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectionFormula()
	 */
	@Override
	public String getSelectionFormula() {
		try {
			return getDelegate().getSelectionFormula();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return null;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
org.openntf.domino.NoteCollection#getSelectJavaResources()
	 */
	@Override
	public boolean getSelectJavaResources() {
		try {
			return getDelegate().getSelectJavaResources();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectMiscCodeElements()
	 */
	@Override
	public boolean getSelectMiscCodeElements() {
		try {
			return getDelegate().getSelectMiscCodeElements();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectMiscFormatElements()
	 */
	@Override
	public boolean getSelectMiscFormatElements() {
		try {
			return getDelegate().getSelectMiscFormatElements();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectMiscIndexElements()
	 */
	@Override
	public boolean getSelectMiscIndexElements() {
		try {
			return getDelegate().getSelectMiscIndexElements();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectNavigators()
	 */
	@Override
	public boolean getSelectNavigators() {
		try {
			return getDelegate().getSelectNavigators();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectOutlines()
	 */
	@Override
	public boolean getSelectOutlines() {
		try {
			return getDelegate().getSelectOutlines();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectPages()
	 */
	@Override
	public boolean getSelectPages() {
		try {
			return getDelegate().getSelectPages();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectProfiles()
	 */
	@Override
	public boolean getSelectProfiles() {
		try {
			return getDelegate().getSelectProfiles();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectReplicationFormulas()
	 */
	@Override
	public boolean getSelectReplicationFormulas() {
		try {
			return getDelegate().getSelectReplicationFormulas();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectScriptLibraries()
	 */
	@Override
	public boolean getSelectScriptLibraries() {
		try {
			return getDelegate().getSelectScriptLibraries();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectSharedFields()
	 */
	@Override
	public boolean getSelectSharedFields() {
		try {
			return getDelegate().getSelectSharedFields();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectStylesheetResources()
	 */
	@Override
	public boolean getSelectStylesheetResources() {
		try {
			return getDelegate().getSelectStylesheetResources();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectSubforms()
	 */
	@Override
	public boolean getSelectSubforms() {
		try {
			return getDelegate().getSelectSubforms();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSelectViews()
	 */
	@Override
	public boolean getSelectViews() {
		try {
			return getDelegate().getSelectViews();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return false;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getSinceTime()
	 */
	@Override
	public DateTime getSinceTime() {
		try {
			// Wrap the lotus DateTime into the org.openntf.domino DateTime.
			return fromLotus(getDelegate().getSinceTime(), DateTime.SCHEMA,
					getAncestorSession());
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return null;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getUNID(java.lang.String)
	 */
	@Override
	public String getUNID(final String noteid) {
		try {
			return getDelegate().getUNID(noteid);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return null;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#getUntilTime()
	 */
	@Override
	public DateTime getUntilTime() {
		try {
			return fromLotus(getDelegate().getUntilTime(), DateTime.SCHEMA, getAncestorSession());
		} catch (NotesException e) {
			DominoUtils.handleException(e);
			return null;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.Agent)
	 */
	@Override
	public void intersect(final lotus.domino.Agent agent) {
		try {
			// Unwrap to the native lotus object before handing to the delegate.
			getDelegate().intersect(toLotus(agent));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.Document)
	 */
	@Override
	public void intersect(final lotus.domino.Document document) {
		try {
			getDelegate().intersect(toLotus(document));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.DocumentCollection)
	 */
	@Override
	public void intersect(final lotus.domino.DocumentCollection collection) {
		try {
			getDelegate().intersect(toLotus(collection));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.Form)
	 */
	@Override
	public void intersect(final lotus.domino.Form form) {
		try {
			getDelegate().intersect(toLotus(form));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(int)
	 */
	@Override
	public void intersect(final int noteId) {
		try {
			getDelegate().intersect(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.NoteCollection)
	 */
	@Override
	public void intersect(final lotus.domino.NoteCollection collection) {
		try {
			getDelegate().intersect(toLotus(collection));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(java.lang.String)
	 */
	@Override
	public void intersect(final String noteId) {
		try {
			getDelegate().intersect(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#intersect(lotus.domino.View)
	 */
	@Override
	public void intersect(final lotus.domino.View view) {
		try {
			getDelegate().intersect(toLotus(view));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Iterable#iterator()
	 */
	@Override
	public Iterator<String> iterator() {
		// Iterates the collection's note IDs as strings via NoteIterator.
		return new NoteIterator(this);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.impl.Base#recycle()
	 */
	@Override
	public void recycle() {
		try {
			getDelegate().recycle();
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.impl.Base#recycle(java.util.Vector)
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public void recycle(final Vector objects) {
		try {
			getDelegate().recycle(objects);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.Agent)
	 */
	@Override
	public void remove(final lotus.domino.Agent agent) {
		try {
			getDelegate().remove(toLotus(agent));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.Document)
	 */
	@Override
	public void remove(final lotus.domino.Document document) {
		try {
			getDelegate().remove(toLotus(document));
		} catch (NotesException e) {
	DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.DocumentCollection)
	 */
	@Override
	public void remove(final lotus.domino.DocumentCollection collection) {
		try {
			// Unwrap to the native lotus object before handing to the delegate.
			getDelegate().remove(toLotus(collection));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.Form)
	 */
	@Override
	public void remove(final lotus.domino.Form form) {
		try {
			getDelegate().remove(toLotus(form));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(int)
	 */
	@Override
	public void remove(final int noteId) {
		try {
			getDelegate().remove(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.NoteCollection)
	 */
	@Override
	public void remove(final lotus.domino.NoteCollection collection) {
		try {
			getDelegate().remove(toLotus(collection));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(java.lang.String)
	 */
	@Override
	public void remove(final String noteId) {
		try {
			getDelegate().remove(noteId);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#remove(lotus.domino.View)
	 */
	@Override
	public void remove(final lotus.domino.View view) {
		try {
			getDelegate().remove(toLotus(view));
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllAdminNotes(boolean)
	 */
	@Override
	public void selectAllAdminNotes(final boolean flag) {
		try {
			getDelegate().selectAllAdminNotes(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllCodeElements(boolean)
	 */
	@Override
	public void selectAllCodeElements(final boolean flag) {
		try {
			getDelegate().selectAllCodeElements(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllDataNotes(boolean)
	 */
	@Override
	public void selectAllDataNotes(final boolean flag) {
		try {
			getDelegate().selectAllDataNotes(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllDesignElements(boolean)
	 */
	@Override
	public void selectAllDesignElements(final boolean flag) {
		try {
			getDelegate().selectAllDesignElements(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllFormatElements(boolean)
	 */
	@Override
	public void selectAllFormatElements(final boolean flag) {
		try {
			getDelegate().selectAllFormatElements(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllIndexElements(boolean)
	 */
	@Override
	public void selectAllIndexElements(final boolean flag) {
		try {
			getDelegate().selectAllIndexElements(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#selectAllNotes(boolean)
	 */
	@Override
	public void selectAllNotes(final boolean flag) {
		try {
			getDelegate().selectAllNotes(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#setSelectAcl(boolean)
	 */
	@Override
	public void setSelectAcl(final boolean flag) {
		try {
			getDelegate().setSelectAcl(flag);
		} catch (NotesException e) {
			DominoUtils.handleException(e);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.openntf.domino.NoteCollection#setSelectActions(boolean)
	 */
	@Override
	public void setSelectActions(final boolean flag) {
		try {
			getDelegate().setSelectActions(flag);
		} catch
(NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectAgents(boolean) */ @Override public void setSelectAgents(final boolean flag) { try { getDelegate().setSelectAgents(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectDatabaseScript(boolean) */ @Override public void setSelectDatabaseScript(final boolean flag) { try { getDelegate().setSelectDatabaseScript(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectDataConnections(boolean) */ @Override public void setSelectDataConnections(final boolean flag) { try { getDelegate().setSelectDataConnections(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectDocuments(boolean) */ @Override public void setSelectDocuments(final boolean flag) { try { getDelegate().setSelectDocuments(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectFolders(boolean) */ @Override public void setSelectFolders(final boolean flag) { try { getDelegate().setSelectFolders(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectForms(boolean) */ @Override public void setSelectForms(final boolean flag) { try { getDelegate().setSelectForms(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectFramesets(boolean) */ @Override public void setSelectFramesets(final boolean flag) { try { getDelegate().setSelectFramesets(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see 
org.openntf.domino.NoteCollection#setSelectHelpAbout(boolean) */ @Override public void setSelectHelpAbout(final boolean flag) { try { getDelegate().setSelectHelpAbout(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectHelpIndex(boolean) */ @Override public void setSelectHelpIndex(final boolean flag) { try { getDelegate().setSelectHelpIndex(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectHelpUsing(boolean) */ @Override public void setSelectHelpUsing(final boolean flag) { try { getDelegate().setSelectHelpUsing(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectIcon(boolean) */ @Override public void setSelectIcon(final boolean flag) { try { getDelegate().setSelectIcon(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectImageResources(boolean) */ @Override public void setSelectImageResources(final boolean flag) { try { getDelegate().setSelectImageResources(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectionFormula(java.lang.String) */ @Override public void setSelectionFormula(final String formula) { try { getDelegate().setSelectionFormula(formula); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectJavaResources(boolean) */ @Override public void setSelectJavaResources(final boolean flag) { try { getDelegate().setSelectJavaResources(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectMiscCodeElements(boolean) */ @Override public void 
setSelectMiscCodeElements(final boolean flag) { try { getDelegate().setSelectMiscCodeElements(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectMiscFormatElements(boolean) */ @Override public void setSelectMiscFormatElements(final boolean flag) { try { getDelegate().setSelectMiscFormatElements(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectMiscIndexElements(boolean) */ @Override public void setSelectMiscIndexElements(final boolean flag) { try { getDelegate().setSelectMiscIndexElements(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectNavigators(boolean) */ @Override public void setSelectNavigators(final boolean flag) { try { getDelegate().setSelectNavigators(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectOutlines(boolean) */ @Override public void setSelectOutlines(final boolean flag) { try { getDelegate().setSelectOutlines(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectPages(boolean) */ @Override public void setSelectPages(final boolean flag) { try { getDelegate().setSelectPages(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectProfiles(boolean) */ @Override public void setSelectProfiles(final boolean flag) { try { getDelegate().setSelectProfiles(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectReplicationFormulas(boolean) */ @Override public void setSelectReplicationFormulas(final boolean flag) { try { 
getDelegate().setSelectReplicationFormulas(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectScriptLibraries(boolean) */ @Override public void setSelectScriptLibraries(final boolean flag) { try { getDelegate().setSelectScriptLibraries(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectSharedFields(boolean) */ @Override public void setSelectSharedFields(final boolean flag) { try { getDelegate().setSelectSharedFields(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectStylesheetResources(boolean) */ @Override public void setSelectStylesheetResources(final boolean flag) { try { getDelegate().setSelectStylesheetResources(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectSubforms(boolean) */ @Override public void setSelectSubforms(final boolean flag) { try { getDelegate().setSelectSubforms(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSelectViews(boolean) */ @Override public void setSelectViews(final boolean flag) { try { getDelegate().setSelectViews(flag); } catch (NotesException e) { DominoUtils.handleException(e); } } /* * (non-Javadoc) * * @see org.openntf.domino.NoteCollection#setSinceTime(lotus.domino.DateTime) */ @Override public void setSinceTime(final lotus.domino.DateTime date) { @SuppressWarnings("rawtypes") List recycleThis = new ArrayList(); try { getDelegate().setSinceTime(toLotus(date, recycleThis)); } catch (NotesException e) { DominoUtils.handleException(e); } finally { s_recycle(recycleThis); } } @Override @SuppressWarnings("unchecked") public void setSinceTime(final java.util.Date date) { 
@SuppressWarnings("rawtypes") List recycleThis = new ArrayList(); try { lotus.domino.DateTime dt = (lotus.domino.DateTime) toDominoFriendly(date, getAncestorSession(), recycleThis); getDelegate().setSinceTime(dt); } catch (NotesException e) { DominoUtils.handleException(e); } finally { s_recycle(recycleThis); } } /* * (non-Javadoc) * * @see org.openntf.domino.types.DatabaseDescendant#getAncestorDatabase() */ @Override public final Database getAncestorDatabase() { return parent; } /* * (non-Javadoc) * * @see org.openntf.domino.types.SessionDescendant#getAncestorSession() */ @Override public final Session getAncestorSession() { return this.getAncestorDatabase().getAncestorSession(); } /* * (non-Javadoc) * * @see org.openntf.domino.ext.NoteCollection#setSelectOptions(java.util.Set) */ @Override public void setSelectOptions(final Set<SelectOption> options) { boolean select = true; if (options.contains(SelectOption.ALL_BUT_NOT)) { selectAllNotes(true); select = false; } for (SelectOption option : options) { switch (option) { case ACL: setSelectAcl(select); break; case ACTIONS: setSelectActions(select); break; case AGENTS: setSelectAgents(select); break; case DATABASE_SCRIPT: setSelectDatabaseScript(select); break; case DATA_CONNECTIONS: setSelectDataConnections(select); break; case DOCUMENTS: setSelectDocuments(select); break; case FOLDERS: setSelectFolders(select); break; case FORMS: setSelectForms(select); break; case FRAMESETS: setSelectFramesets(select); break; case HELP_ABOUT: setSelectHelpAbout(select); break; case HELP_INDEX: setSelectHelpIndex(select); break; case HELP_USING: setSelectHelpUsing(select); break; case ICON: setSelectIcon(select); break; case IMAGE_RESOURCES: setSelectImageResources(select); break; case JAVA_RESOURCES: setSelectJavaResources(select); break; case MISC_CODE: setSelectMiscCodeElements(select); break; case MISC_FORMAT: setSelectMiscFormatElements(select); break; case MISC_INDEX: setSelectMiscIndexElements(select); break; case 
NAVIGATORS: setSelectNavigators(select); break; case OUTLINES: setSelectOutlines(select); break; case PAGES: setSelectPages(select); break; case PROFILES: setSelectProfiles(select); break; case REPLICATION_FORMULAS: setSelectReplicationFormulas(select); break; case SCRIPT_LIBRARIES: setSelectScriptLibraries(select); break; case SHARED_FIELDS: setSelectSharedFields(select); break; case STYLESHEETS: setSelectStylesheetResources(select); break; case SUBFORMS: setSelectSubforms(select); break; case VIEWS: setSelectViews(select); break; case ALL_BUT_NOT: break; default: break; } } } @Override protected WrapperFactory getFactory() { return parent.getAncestorSession().getFactory(); } }
/*
 * Copyright 2009 Martin Grotzke
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package de.javakaffee.web.msm;

import static de.javakaffee.web.msm.SessionValidityInfo.encode;
import static de.javakaffee.web.msm.integration.TestUtils.STICKYNESS_PROVIDER;
import static de.javakaffee.web.msm.integration.TestUtils.createContext;
import static de.javakaffee.web.msm.integration.TestUtils.createSession;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import static org.testng.Assert.*;

import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import javax.annotation.Nonnull;

import net.spy.memcached.MemcachedClient;
import net.spy.memcached.internal.OperationFuture;

import org.apache.catalina.LifecycleException;
import org.apache.catalina.connector.Request;
import org.apache.catalina.core.StandardContext;
import org.mockito.ArgumentCaptor;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import de.javakaffee.web.msm.BackupSessionTask.BackupResult;
import de.javakaffee.web.msm.LockingStrategy.LockingMode;
import
de.javakaffee.web.msm.MemcachedSessionService.SessionManager; import de.javakaffee.web.msm.integration.TestUtils; import de.javakaffee.web.msm.integration.TestUtils.SessionAffinityMode; /** * Test the {@link MemcachedSessionService}. * * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */ public abstract class MemcachedSessionServiceTest { private MemcachedSessionService _service; private MemcachedClient _memcachedMock; private ExecutorService _executor; @SuppressWarnings("unchecked") @BeforeMethod public void setup() throws Exception { final SessionManager manager = createSessionManager(); _service = manager.getMemcachedSessionService(); _service.setMemcachedNodes( "n1:127.0.0.1:11211" ); _service.setSessionBackupAsync( false ); _service.setSticky( true ); final StandardContext context = createContext(); context.setBackgroundProcessorDelay( 1 ); // needed for test of updateExpiration manager.setContainer( context ); _memcachedMock = mock( MemcachedClient.class ); final OperationFuture<Boolean> setResultMock = mock( OperationFuture.class ); when( setResultMock.get( anyInt(), any( TimeUnit.class ) ) ).thenReturn( Boolean.TRUE ); when( _memcachedMock.set( any( String.class ), anyInt(), any() ) ).thenReturn( setResultMock ); final OperationFuture<Boolean> deleteResultMock = mock( OperationFuture.class ); when( deleteResultMock.get() ).thenReturn( Boolean.TRUE ); when( _memcachedMock.delete( anyString() ) ).thenReturn( deleteResultMock ); startInternal( manager, _memcachedMock ); _executor = Executors.newCachedThreadPool(); } @AfterMethod public void afterMethod() { _executor.shutdown(); } protected void startInternal( @Nonnull final SessionManager manager, @Nonnull final MemcachedClient memcachedMock ) throws LifecycleException { throw new UnsupportedOperationException(); } @Nonnull protected abstract SessionManager createSessionManager(); @Test public void testConfigurationFormatMemcachedNodesFeature44() throws LifecycleException { 
_service.setMemcachedNodes( "n1:127.0.0.1:11211" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getNodeIds(), Arrays.asList( "n1" ) ); _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getNodeIds(), Arrays.asList( "n1", "n2" ) ); _service.setMemcachedNodes( "n1:127.0.0.1:11211,n2:127.0.0.1:11212" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getNodeIds(), Arrays.asList( "n1", "n2" ) ); } @Test public void testConfigurationFormatFailoverNodesFeature44() throws LifecycleException { _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212" ); _service.setFailoverNodes( "n1" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getFailoverNodeIds(), Arrays.asList( "n1" ) ); _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212 n3:127.0.0.1:11213" ); _service.setFailoverNodes( "n1 n2" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getFailoverNodeIds(), Arrays.asList( "n1", "n2" ) ); _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212 n3:127.0.0.1:11213" ); _service.setFailoverNodes( "n1,n2" ); _service.startInternal(_memcachedMock); Assert.assertEquals( _service.getFailoverNodeIds(), Arrays.asList( "n1", "n2" ) ); } /** * Test for issue #105: Make memcached node optional for single-node setup * http://code.google.com/p/memcached-session-manager/issues/detail?id=105 */ @Test public void testConfigurationFormatMemcachedNodesFeature105() throws LifecycleException { _service.setMemcachedNodes( "127.0.0.1:11211" ); _service.startInternal(_memcachedMock); assertEquals(_service.getMemcachedNodesManager().getCountNodes(), 1); assertEquals(_service.getMemcachedNodesManager().isEncodeNodeIdInSessionId(), false); assertEquals(_service.getMemcachedNodesManager().isValidForMemcached("123456"), true); _service.shutdown(); _service.setMemcachedNodes( 
"n1:127.0.0.1:11211" ); _service.startInternal(_memcachedMock); assertEquals(_service.getMemcachedNodesManager().getCountNodes(), 1); assertEquals(_service.getMemcachedNodesManager().isEncodeNodeIdInSessionId(), true); assertEquals(_service.getMemcachedNodesManager().isValidForMemcached("123456"), false); assertEquals(_service.getMemcachedNodesManager().isValidForMemcached("123456-n1"), true); } /** * Test for issue #105: Make memcached node optional for single-node setup * http://code.google.com/p/memcached-session-manager/issues/detail?id=105 */ @Test public void testBackupSessionFailureWithoutMemcachedNodeIdConfigured105() throws Exception { _service.setMemcachedNodes( "127.0.0.1:11211" ); _service.setSessionBackupAsync(false); _service.startInternal(_memcachedMock); final MemcachedBackupSession session = createSession( _service ); session.access(); session.endAccess(); session.setAttribute( "foo", "bar" ); @SuppressWarnings( "unchecked" ) final OperationFuture<Boolean> futureMock = mock( OperationFuture.class ); when( futureMock.get( anyInt(), any( TimeUnit.class ) ) ).thenThrow(new ExecutionException(new RuntimeException("Simulated exception."))); when( _memcachedMock.set( eq( session.getId() ), anyInt(), any() ) ).thenReturn( futureMock ); final BackupResult backupResult = _service.backupSession( session.getIdInternal(), false, null ).get(); assertEquals(backupResult.getStatus(), BackupResultStatus.FAILURE); verify( _memcachedMock, times( 1 ) ).set( eq( session.getId() ), anyInt(), any() ); } /** * Test that sessions are only backuped if they are modified. 
* @throws ExecutionException * @throws InterruptedException */ @Test public void testOnlySendModifiedSessions() throws InterruptedException, ExecutionException { final MemcachedBackupSession session = createSession( _service ); /* simulate the first request, with session access */ session.access(); session.endAccess(); session.setAttribute( "foo", "bar" ); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( _memcachedMock, times( 1 ) ).set( eq( session.getId() ), anyInt(), any() ); // we need some millis between last backup and next access (due to check in BackupSessionService) Thread.sleep(5L); /* simulate the second request, with session access */ session.access(); session.endAccess(); session.setAttribute( "foo", "bar" ); session.setAttribute( "bar", "baz" ); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( _memcachedMock, times( 2 ) ).set( eq( session.getId() ), anyInt(), any() ); // we need some millis between last backup and next access (due to check in BackupSessionService) Thread.sleep(5L); /* simulate the third request, without session access */ _service.backupSession( session.getIdInternal(), false, null ).get(); verify( _memcachedMock, times( 2 ) ).set( eq( session.getId() ), anyInt(), any() ); } /** * Test that session attribute serialization and hash calculation is only * performed if session attributes were accessed since the last backup. 
* Otherwise this computing time shall be saved for a better world :-) * @throws ExecutionException * @throws InterruptedException */ @Test public void testOnlyHashAttributesOfAccessedAttributes() throws InterruptedException, ExecutionException { final TranscoderService transcoderServiceMock = mock( TranscoderService.class ); @SuppressWarnings( "unchecked" ) final Map<String, Object> anyMap = any( Map.class ); when( transcoderServiceMock.serializeAttributes( any( MemcachedBackupSession.class ), anyMap ) ).thenReturn( new byte[0] ); _service.setTranscoderService( transcoderServiceMock ); final MemcachedBackupSession session = createSession( _service ); session.access(); session.endAccess(); session.setAttribute( "foo", "bar" ); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, times( 1 ) ).serializeAttributes( eq( session ), eq( session.getAttributesInternal() ) ); session.access(); session.endAccess(); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, times( 1 ) ).serializeAttributes( eq( session ), eq( session.getAttributesInternal() ) ); } /** * Test that session attribute serialization and hash calculation is only * performed if the session and its attributes were accessed since the last backup/backup check. 
* Otherwise this computing time shall be saved for a better world :-) * @throws ExecutionException * @throws InterruptedException */ @Test public void testOnlyHashAttributesOfAccessedSessionsAndAttributes() throws InterruptedException, ExecutionException { final TranscoderService transcoderServiceMock = mock( TranscoderService.class ); @SuppressWarnings( "unchecked" ) final Map<String, Object> anyMap = any( Map.class ); when( transcoderServiceMock.serializeAttributes( any( MemcachedBackupSession.class ), anyMap ) ).thenReturn( new byte[0] ); _service.setTranscoderService( transcoderServiceMock ); final MemcachedBackupSession session = createSession( _service ); session.setAttribute( "foo", "bar" ); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, times( 1 ) ).serializeAttributes( eq( session ), eq( session.getAttributesInternal() ) ); // we need some millis between last backup and next access (due to check in BackupSessionService) Thread.sleep(5L); session.access(); session.getAttribute( "foo" ); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, times( 2 ) ).serializeAttributes( eq( session ), eq( session.getAttributesInternal() ) ); // we need some millis between last backup and next access (due to check in BackupSessionService) Thread.sleep(5L); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, times( 2 ) ).serializeAttributes( eq( session ), eq( session.getAttributesInternal() ) ); } /** * Test for issue #68: External change of sessionId must be handled correctly. * * When the webapp is configured with BASIC auth the sessionId is changed on login since 6.0.21 * (AuthenticatorBase.register invokes manager.changeSessionId(session)). 
* This change of the sessionId was not recognized by msm so that it might have happened that the * session is removed from memcached under the old id but not sent to memcached (if the case the session * was not accessed during this request at all, which is very unprobable but who knows). */ @Test( dataProviderClass = TestUtils.class, dataProvider = STICKYNESS_PROVIDER ) public void testChangeSessionId( final SessionAffinityMode stickyness ) throws InterruptedException, ExecutionException, TimeoutException { _service.setStickyInternal( stickyness.isSticky() ); if ( !stickyness.isSticky() ) { _service.setLockingMode( LockingMode.NONE, null, false ); } final MemcachedBackupSession session = createSession( _service ); session.setAttribute( "foo", "bar" ); _service.backupSession( session.getIdInternal(), false, "foo" ).get(); final String oldSessionId = session.getId(); _service.getManager().changeSessionId( session ); // on session backup we specify sessionIdChanged as false as we're not aware of this fact _service.backupSession( session.getIdInternal(), false, "foo" ); // remove session with old id and add it with the new id verify( _memcachedMock, times( 1 ) ).delete( eq( oldSessionId ) ); verify( _memcachedMock, times( 1 ) ).set( eq( session.getId() ), anyInt(), any() ); if ( !stickyness.isSticky() ) { Thread.sleep(200l); // check validity info verify( _memcachedMock, times( 1 ) ).delete( eq( new SessionIdFormat().createValidityInfoKeyName( oldSessionId ) ) ); verify( _memcachedMock, times( 1 ) ).set( eq( new SessionIdFormat().createValidityInfoKeyName( session.getId() ) ), anyInt(), any() ); } } /** * Test that sessions with a timeout of 0 or less are stored in memcached with unlimited * expiration time (0) also (see http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt). * For non-sticky sessions that must hold true for all related items stored in memcached (validation, * backup etc.) 
* * This is the test for issue #88 "Support session-timeout of 0 or less (no session expiration)" * http://code.google.com/p/memcached-session-manager/issues/detail?id=88 */ @Test( dataProviderClass = TestUtils.class, dataProvider = STICKYNESS_PROVIDER ) public void testSessionTimeoutUnlimitedWithSessionLoaded( final SessionAffinityMode stickyness ) throws InterruptedException, ExecutionException, LifecycleException { _service.setStickyInternal( stickyness.isSticky() ); if ( !stickyness.isSticky() ) { _service.setLockingMode( LockingMode.NONE, null, false ); _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212" ); // for backup support _service.startInternal(_memcachedMock); // we must put in our mock again } final MemcachedBackupSession session = createSession( _service ); session.setMaxInactiveInterval( -1 ); session.access(); session.endAccess(); session.setAttribute( "foo", "bar" ); final String sessionId = session.getId(); _service.backupSession( sessionId, false, null ).get(); verify( _memcachedMock, times( 1 ) ).set( eq( sessionId ), eq( 0 ), any() ); if ( !stickyness.isSticky() ) { // check validity info final String validityKey = new SessionIdFormat().createValidityInfoKeyName( sessionId ); verify( _memcachedMock, times( 1 ) ).set( eq( validityKey ), eq( 0 ), any() ); // As the backup is done asynchronously, we shutdown the executor so that we know the backup // task is executed/finished. 
_service.getLockingStrategy().getExecutorService().shutdown(); // On windows we need to wait a little bit so that the tasks _really_ have finished (not needed on linux) Thread.sleep(15); final String backupSessionKey = new SessionIdFormat().createBackupKey( sessionId ); verify( _memcachedMock, times( 1 ) ).set( eq( backupSessionKey ), eq( 0 ), any() ); final String backupValidityKey = new SessionIdFormat().createBackupKey( validityKey ); verify( _memcachedMock, times( 1 ) ).set( eq( backupValidityKey ), eq( 0 ), any() ); } } /** * Test that non-sticky sessions with a timeout of 0 or less that have not been loaded by a request * the validity info is stored in memcached with unlimited * expiration time (0) also (see http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt). * For non-sticky sessions that must hold true for all related items stored in memcached (validation, * backup etc.) * * This is the test for issue #88 "Support session-timeout of 0 or less (no session expiration)" * http://code.google.com/p/memcached-session-manager/issues/detail?id=88 */ @Test public void testSessionTimeoutUnlimitedWithNonStickySessionNotLoaded() throws InterruptedException, ExecutionException, LifecycleException, TimeoutException { _service.setStickyInternal( false ); _service.setLockingMode( LockingMode.NONE, null, false ); _service.setMemcachedNodes( "n1:127.0.0.1:11211 n2:127.0.0.1:11212" ); // for backup support _service.startInternal(_memcachedMock); // we must put in our mock again final String sessionId = "someSessionNotLoaded-n1"; // stub loading of validity info final String validityKey = new SessionIdFormat().createValidityInfoKeyName( sessionId ); final byte[] validityData = encode( -1, System.currentTimeMillis(), System.currentTimeMillis() ); when( _memcachedMock.get( eq( validityKey ) ) ).thenReturn( validityData ); // stub session (backup) ping @SuppressWarnings( "unchecked" ) final OperationFuture<Boolean> futureMock = mock( OperationFuture.class ); 
when( futureMock.get() ).thenReturn( Boolean.FALSE ); when( futureMock.get( anyInt(), any( TimeUnit.class ) ) ).thenReturn( Boolean.FALSE ); when( _memcachedMock.add( any( String.class ), anyInt(), any() ) ).thenReturn( futureMock ); _service.backupSession( sessionId, false, null ).get(); // update validity info verify( _memcachedMock, times( 1 ) ).set( eq( validityKey ), eq( 0 ), any() ); // As the backup is done asynchronously, we shutdown the executor so that we know the backup // task is executed/finished. _service.getLockingStrategy().getExecutorService().shutdown(); // On windows we need to wait a little bit so that the tasks _really_ have finished (not needed on linux) Thread.sleep(15); // ping session verify( _memcachedMock, times( 1 ) ).add( eq( sessionId ), anyInt(), any() ); // ping session backup final String backupSessionKey = new SessionIdFormat().createBackupKey( sessionId ); verify( _memcachedMock, times( 1 ) ).add( eq( backupSessionKey ), anyInt(), any() ); // update validity backup final String backupValidityKey = new SessionIdFormat().createBackupKey( validityKey ); verify( _memcachedMock, times( 1 ) ).set( eq( backupValidityKey ), eq( 0 ), any() ); } /** * Tests sessionAttributeFilter attribute: when excluded attributes are accessed/put the session should * not be marked as touched. 
*/ @SuppressWarnings( "unchecked" ) @Test public void testOnlyHashAttributesOfAccessedFilteredAttributes() throws InterruptedException, ExecutionException { final TranscoderService transcoderServiceMock = mock( TranscoderService.class ); _service.setTranscoderService( transcoderServiceMock ); final MemcachedBackupSession session = createSession( _service ); _service.setSessionAttributeFilter( "^(foo|bar)$" ); session.setAttribute( "baz", "baz" ); session.access(); session.endAccess(); _service.backupSession( session.getIdInternal(), false, null ).get(); verify( transcoderServiceMock, never() ).serializeAttributes( (MemcachedBackupSession)any(), anyMap() ); } /** * Tests sessionAttributeFilter attribute: only filtered/allowed attributes must be serialized. */ @SuppressWarnings( { "unchecked", "rawtypes" } ) @Test public void testOnlyFilteredAttributesAreIncludedInSessionBackup() throws InterruptedException, ExecutionException { final TranscoderService transcoderServiceMock = mock( TranscoderService.class ); final Map<String, Object> anyMap = any( Map.class ); when( transcoderServiceMock.serializeAttributes( any( MemcachedBackupSession.class ), anyMap ) ).thenReturn( new byte[0] ); _service.setTranscoderService( transcoderServiceMock ); final MemcachedBackupSession session = createSession( _service ); _service.setSessionAttributeFilter( "^(foo|bar)$" ); session.setAttribute( "foo", "foo" ); session.setAttribute( "bar", "bar" ); session.setAttribute( "baz", "baz" ); _service.backupSession( session.getIdInternal(), false, null ).get(); // capture the supplied argument, alternatively we could have used some Matcher (but there seems to be no MapMatcher). 
final ArgumentCaptor<Map> model = ArgumentCaptor.forClass( Map.class );
        verify( transcoderServiceMock, times( 1 ) ).serializeAttributes( eq( session ), model.capture() );
        // the serialized attributes must only contain allowed ones
        assertTrue( model.getValue().containsKey( "foo" ) );
        assertTrue( model.getValue().containsKey( "bar" ) );
        assertFalse( model.getValue().containsKey( "baz" ) );
    }

    /**
     * Tests sessionAttributeFilter attribute: only filtered/allowed attributes must be serialized in updateExpirationInMemcached.
     */
    @SuppressWarnings( { "unchecked", "rawtypes" } )
    @Test
    public void testOnlyFilteredAttributesAreIncludedDuringUpdateExpiration() throws InterruptedException, ExecutionException {
        final TranscoderService transcoderServiceMock = mock( TranscoderService.class );
        final Map<String, Object> anyMap = any( Map.class );
        when( transcoderServiceMock.serializeAttributes( any( MemcachedBackupSession.class ), anyMap ) ).thenReturn( new byte[0] );
        _service.setTranscoderService( transcoderServiceMock );
        final MemcachedBackupSession session = createSession( _service );
        _service.setSessionAttributeFilter( "^(foo|bar)$" );
        session.setAttribute( "foo", "foo" );
        session.setAttribute( "bar", "bar" );
        session.setAttribute( "baz", "baz" );
        // access/endAccess marks the session as accessed so updateExpirationInMemcached picks it up
        session.access();
        session.endAccess();
        _service.updateExpirationInMemcached();
        // capture the supplied argument, alternatively we could have used some Matcher (but there seems to be no MapMatcher).
final ArgumentCaptor<Map> model = ArgumentCaptor.forClass( Map.class );
        verify( transcoderServiceMock, times( 1 ) ).serializeAttributes( eq( session ), model.capture() );
        // the serialized attributes must only contain allowed ones
        assertTrue( model.getValue().containsKey( "foo" ) );
        assertTrue( model.getValue().containsKey( "bar" ) );
        assertFalse( model.getValue().containsKey( "baz" ) );
    }

    @Test
    public void testSessionsRefCountHandlingIssue111() throws Exception {
        // Non-sticky + full locking: sessions are loaded from memcached per request and
        // reference-counted while concurrent requests share the same instance.
        _service.setSticky(false);
        _service.setLockingMode(LockingMode.ALL.name());

        // Use a real transcoder here so that the session can round-trip through the mocked memcached.
        final TranscoderService transcoderService = new TranscoderService(new JavaSerializationTranscoder());
        _service.setTranscoderService( transcoderService );

        _service.setMemcachedClient(_memcachedMock);
        _service.startInternal();

        // Locking is implemented via memcached "add"; make every add succeed.
        @SuppressWarnings("unchecked")
        final OperationFuture<Boolean> addResultMock = mock(OperationFuture.class);
        when(addResultMock.get()).thenReturn(true);
        when(addResultMock.get(anyLong(), any(TimeUnit.class))).thenReturn(true);
        when(_memcachedMock.add(anyString(), anyInt(), any(TimeUnit.class))).thenReturn(addResultMock);

        final MemcachedBackupSession session = createSession( _service );
        // the session is now already added to the internal session map
        assertNotNull(session.getId());

        Future<BackupResult> result = _service.backupSession(session.getId(), false, null);
        // In non-sticky mode the backup must remove the session from the local map.
        assertFalse(_service.getManager().getSessionsInternal().containsKey(session.getId()));

        // start another request that loads the session from mc
        final Request requestMock = mock(Request.class);
        when(requestMock.getNote(eq(RequestTrackingContextValve.INVOKED))).thenReturn(Boolean.TRUE);
        _service.getTrackingHostValve().storeRequestThreadLocal(requestMock);
        when(_memcachedMock.get(eq(session.getId()))).thenReturn(transcoderService.serialize(session));
        final MemcachedBackupSession session2 = _service.findSession(session.getId());
        assertTrue(session2.isLocked());
        // First loading request holds exactly one reference.
        assertEquals(session2.getRefCount(), 1);
        session2.setAttribute("foo", "bar");

        final CyclicBarrier
barrier = new CyclicBarrier(2);
        // the session is now in the internal session map,
        // now let's run a concurrent request
        final Future<BackupResult> request2 = _executor.submit(new Callable<BackupResult>() {

            @Override
            public BackupResult call() throws Exception {
                // The concurrent request must see the very same session instance...
                final MemcachedBackupSession session3 = _service.findSession(session.getId());
                assertSame(session3, session2);
                // ...and bump the reference count to two.
                assertEquals(session3.getRefCount(), 2);
                // let the other thread proceed (or wait)
                barrier.await();
                // and wait again so that the other thread can do some work
                barrier.await();

                final Future<BackupResult> result = _service.backupSession(session.getId(), false, null);
                _service.getTrackingHostValve().resetRequestThreadLocal();
                assertEquals(result.get().getStatus(), BackupResultStatus.SUCCESS);
                // The session should be released now and no longer stored
                assertFalse(_service.getManager().getSessionsInternal().containsKey(session.getId()));
                // just some double checking on expectations...
                assertEquals(session2.getRefCount(), 0);
                return result.get();
            }

        });

        barrier.await();

        // First request finishes while the second still holds a reference:
        // the backup must be skipped and the session kept locally.
        result = _service.backupSession(session.getId(), false, null);
        _service.getTrackingHostValve().resetRequestThreadLocal();
        assertEquals(result.get().getStatus(), BackupResultStatus.SKIPPED);
        // This is the important point!
        assertTrue(_service.getManager().getSessionsInternal().containsKey(session.getId()));
        // just some double checking on expectations...
        assertEquals(session2.getRefCount(), 1);

        // now let the other thread proceed
        barrier.await();

        // and wait for the result, also to get exceptions/assertion errors.
request2.get();
    }

    @Test
    public void testInvalidNonStickySessionDoesNotCallOnBackupWithoutLoadedSessionIssue137() throws Exception {
        _service.setStickyInternal( false );
        _service.setLockingMode( LockingMode.NONE, null, false );
        _service.startInternal(_memcachedMock); // we must put in our mock again

        // A session id that is not present in memcached at all.
        final String sessionId = "nonStickySessionToTimeOut-n1";

        // For findSession needed
        final Request requestMock = mock(Request.class);
        when(requestMock.getNote(eq(RequestTrackingContextValve.INVOKED))).thenReturn(Boolean.TRUE);
        _service.getTrackingHostValve().storeRequestThreadLocal(requestMock);

        final MemcachedBackupSession session = _service.findSession(sessionId);
        assertNull(session);

        // Backing up an unknown session must be a no-op and not touch validity info.
        _service.backupSession( sessionId, false, null ).get();

        // check that validity info is not loaded - this would trigger the
        // WARNING: Found no validity info for session id ...
        final String validityKey = new SessionIdFormat().createValidityInfoKeyName( sessionId );
        verify( _memcachedMock, times( 0 ) ).get( eq( validityKey ) );
    }

}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.impala.service; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.impala.analysis.DescriptorTable; import org.apache.impala.analysis.ToSqlUtils; import org.apache.impala.authorization.AuthorizationConfig; import org.apache.impala.authorization.ImpalaInternalAdminUser; import org.apache.impala.authorization.User; import org.apache.impala.catalog.DataSource; import org.apache.impala.catalog.Db; import org.apache.impala.catalog.Function; import org.apache.impala.catalog.Role; import org.apache.impala.catalog.StructType; import org.apache.impala.catalog.Type; import org.apache.impala.common.FileSystemUtil; import org.apache.impala.common.ImpalaException; import 
org.apache.impala.common.InternalException; import org.apache.impala.common.JniUtil; import org.apache.impala.thrift.TBackendGflags; import org.apache.impala.thrift.TBuildTestDescriptorTableParams; import org.apache.impala.thrift.TCatalogObject; import org.apache.impala.thrift.TDatabase; import org.apache.impala.thrift.TDescribeDbParams; import org.apache.impala.thrift.TDescribeOutputStyle; import org.apache.impala.thrift.TDescribeResult; import org.apache.impala.thrift.TDescribeTableParams; import org.apache.impala.thrift.TDescriptorTable; import org.apache.impala.thrift.TExecRequest; import org.apache.impala.thrift.TFunctionCategory; import org.apache.impala.thrift.TGetAllHadoopConfigsResponse; import org.apache.impala.thrift.TGetDataSrcsParams; import org.apache.impala.thrift.TGetDataSrcsResult; import org.apache.impala.thrift.TGetDbsParams; import org.apache.impala.thrift.TGetDbsResult; import org.apache.impala.thrift.TGetFunctionsParams; import org.apache.impala.thrift.TGetFunctionsResult; import org.apache.impala.thrift.TGetHadoopConfigRequest; import org.apache.impala.thrift.TGetHadoopConfigResponse; import org.apache.impala.thrift.TGetTablesParams; import org.apache.impala.thrift.TGetTablesResult; import org.apache.impala.thrift.TLoadDataReq; import org.apache.impala.thrift.TLoadDataResp; import org.apache.impala.thrift.TLogLevel; import org.apache.impala.thrift.TMetadataOpRequest; import org.apache.impala.thrift.TQueryCtx; import org.apache.impala.thrift.TResultSet; import org.apache.impala.thrift.TShowFilesParams; import org.apache.impala.thrift.TShowGrantRoleParams; import org.apache.impala.thrift.TShowRolesParams; import org.apache.impala.thrift.TShowRolesResult; import org.apache.impala.thrift.TShowStatsOp; import org.apache.impala.thrift.TShowStatsParams; import org.apache.impala.thrift.TTableName; import org.apache.impala.thrift.TUniqueId; import org.apache.impala.thrift.TUpdateCatalogCacheRequest; import 
org.apache.impala.thrift.TUpdateMembershipRequest;
import org.apache.impala.util.GlogAppender;
import org.apache.impala.util.PatternMatcher;
import org.apache.impala.util.TSessionStateUtil;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * JNI-callable interface onto a wrapped Frontend instance. The main point is to serialise
 * and deserialise thrift structures between C and Java.
 */
public class JniFrontend {
  private final static Logger LOG = LoggerFactory.getLogger(JniFrontend.class);
  // Thrift protocol factory used for all (de)serialization across the JNI boundary.
  private final static TBinaryProtocol.Factory protocolFactory_ =
      new TBinaryProtocol.Factory();
  // The wrapped planner/metadata frontend that does the actual work.
  private final Frontend frontend_;

  /**
   * Create a new instance of the Jni Frontend.
   * @param thriftBackendConfig serialized TBackendGflags carrying backend startup flags
   */
  public JniFrontend(byte[] thriftBackendConfig) throws ImpalaException, TException {
    TBackendGflags cfg = new TBackendGflags();
    JniUtil.deserializeThrift(protocolFactory_, cfg, thriftBackendConfig);
    BackendConfig.create(cfg);
    // Route Java-side logging through glog at the backend-configured verbosity.
    GlogAppender.Install(TLogLevel.values()[cfg.impala_log_lvl],
        TLogLevel.values()[cfg.non_impala_java_vlog]);

    // Validate the authorization configuration before initializing the Frontend.
    // If there are any configuration problems Impala startup will fail.
    AuthorizationConfig authConfig = new AuthorizationConfig(cfg.server_name,
        cfg.authorization_policy_file, cfg.sentry_config,
        cfg.authorization_policy_provider_class);
    authConfig.validateConfig();
    if (authConfig.isEnabled()) {
      LOG.info(String.format("Authorization is 'ENABLED' using %s",
          authConfig.isFileBasedPolicy() ?
" file based policy from: " + authConfig.getPolicyFile() :
          " using Sentry Policy Service."));
    } else {
      LOG.info("Authorization is 'DISABLED'.");
    }
    LOG.info(JniUtil.getJavaVersion());
    frontend_ = new Frontend(authConfig, cfg.kudu_master_hosts);
  }

  /**
   * Jni wrapper for Frontend.createExecRequest(). Accepts a serialized
   * TQueryContext; returns a serialized TQueryExecRequest.
   */
  public byte[] createExecRequest(byte[] thriftQueryContext) throws ImpalaException {
    TQueryCtx queryCtx = new TQueryCtx();
    JniUtil.deserializeThrift(protocolFactory_, queryCtx, thriftQueryContext);

    StringBuilder explainString = new StringBuilder();
    TExecRequest result = frontend_.createExecRequest(queryCtx, explainString);
    // Only log the explain output at trace level; it can be large.
    if (explainString.length() > 0 && LOG.isTraceEnabled()) {
      LOG.trace(explainString.toString());
    }

    // TODO: avoid creating serializer for each query?
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(result);
    } catch (TException e) {
      // NOTE(review): the cause is dropped here (only the message survives) — consider
      // a constructor overload taking the cause, if InternalException supports it.
      throw new InternalException(e.getMessage());
    }
  }

  // Deserialize and merge each thrift catalog update into a single merged update
  public byte[] updateCatalogCache(byte[][] thriftCatalogUpdates) throws ImpalaException {
    TUniqueId defaultCatalogServiceId = new TUniqueId(0L, 0L);
    TUpdateCatalogCacheRequest mergedUpdateRequest = new TUpdateCatalogCacheRequest(
        false, defaultCatalogServiceId, new ArrayList<TCatalogObject>(),
        new ArrayList<TCatalogObject>());
    for (byte[] catalogUpdate: thriftCatalogUpdates) {
      TUpdateCatalogCacheRequest incrementalRequest = new TUpdateCatalogCacheRequest();
      JniUtil.deserializeThrift(protocolFactory_, incrementalRequest, catalogUpdate);
      // Any delta update makes the merged request a delta.
      mergedUpdateRequest.is_delta |= incrementalRequest.is_delta;
      // Adopt the first non-default catalog service id encountered.
      if (!incrementalRequest.getCatalog_service_id().equals(defaultCatalogServiceId)) {
        mergedUpdateRequest.setCatalog_service_id(
            incrementalRequest.getCatalog_service_id());
      }
      mergedUpdateRequest.getUpdated_objects().addAll(
          incrementalRequest.getUpdated_objects());
mergedUpdateRequest.getRemoved_objects().addAll(
          incrementalRequest.getRemoved_objects());
    }
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(frontend_.updateCatalogCache(mergedUpdateRequest));
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  /**
   * Jni wrapper for Frontend.updateMembership(). Accepts a serialized
   * TUpdateMembershipRequest.
   */
  public void updateMembership(byte[] thriftMembershipUpdate) throws ImpalaException {
    TUpdateMembershipRequest req = new TUpdateMembershipRequest();
    JniUtil.deserializeThrift(protocolFactory_, req, thriftMembershipUpdate);
    frontend_.updateMembership(req);
  }

  /**
   * Loads a table or partition with one or more data files. If the "overwrite" flag
   * in the request is true, all existing data in the table/partition will be replaced.
   * If the "overwrite" flag is false, the files will be added alongside any existing
   * data files.
   */
  public byte[] loadTableData(byte[] thriftLoadTableDataParams)
      throws ImpalaException, IOException {
    TLoadDataReq request = new TLoadDataReq();
    JniUtil.deserializeThrift(protocolFactory_, request, thriftLoadTableDataParams);
    TLoadDataResp response = frontend_.loadTableData(request);
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(response);
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  /**
   * Return an explain plan based on thriftQueryContext, a serialized TQueryContext.
   * This call is thread-safe.
   */
  public String getExplainPlan(byte[] thriftQueryContext) throws ImpalaException {
    TQueryCtx queryCtx = new TQueryCtx();
    JniUtil.deserializeThrift(protocolFactory_, queryCtx, thriftQueryContext);
    String plan = frontend_.getExplainString(queryCtx);
    if (LOG.isTraceEnabled()) LOG.trace("Explain plan: " + plan);
    return plan;
  }

  /**
   * Implement Hive's pattern-matching semantics for "SHOW TABLE [[LIKE] 'pattern']", and
   * return a list of table names matching an optional pattern.
* The only metacharacters are '*' which matches any string of characters, and '|' * which denotes choice. Doing the work here saves loading tables or databases from the * metastore (which Hive would do if we passed the call through to the metastore * client). If the pattern is null, all strings are considered to match. If it is an * empty string, no strings match. * * The argument is a serialized TGetTablesParams object. * The return type is a serialised TGetTablesResult object. * @see Frontend#getTableNames */ public byte[] getTableNames(byte[] thriftGetTablesParams) throws ImpalaException { TGetTablesParams params = new TGetTablesParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftGetTablesParams); // If the session was not set it indicates this is an internal Impala call. User user = params.isSetSession() ? new User(TSessionStateUtil.getEffectiveUser(params.getSession())) : ImpalaInternalAdminUser.getInstance(); Preconditions.checkState(!params.isSetSession() || user != null ); List<String> tables = frontend_.getTableNames(params.db, PatternMatcher.createHivePatternMatcher(params.pattern), user); TGetTablesResult result = new TGetTablesResult(); result.setTables(tables); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns files info of a table or partition. * The argument is a serialized TShowFilesParams object. * The return type is a serialised TResultSet object. 
* @see Frontend#getTableFiles */ public byte[] getTableFiles(byte[] thriftShowFilesParams) throws ImpalaException { TShowFilesParams params = new TShowFilesParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftShowFilesParams); TResultSet result = frontend_.getTableFiles(params); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Implement Hive's pattern-matching semantics for "SHOW DATABASES [[LIKE] 'pattern']", * and return a list of databases matching an optional pattern. * @see JniFrontend#getTableNames(byte[]) for more detail. * * The argument is a serialized TGetDbParams object. * The return type is a serialised TGetDbResult object. * @see Frontend#getDbs */ public byte[] getDbs(byte[] thriftGetTablesParams) throws ImpalaException { TGetDbsParams params = new TGetDbsParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftGetTablesParams); // If the session was not set it indicates this is an internal Impala call. User user = params.isSetSession() ? new User(TSessionStateUtil.getEffectiveUser(params.getSession())) : ImpalaInternalAdminUser.getInstance(); List<Db> dbs = frontend_.getDbs( PatternMatcher.createHivePatternMatcher(params.pattern), user); TGetDbsResult result = new TGetDbsResult(); List<TDatabase> tDbs = Lists.newArrayListWithCapacity(dbs.size()); for (Db db: dbs) tDbs.add(db.toThrift()); result.setDbs(tDbs); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns a list of data sources matching an optional pattern. * The argument is a serialized TGetDataSrcsResult object. * The return type is a serialised TGetDataSrcsResult object. 
* @see Frontend#getDataSrcs */ public byte[] getDataSrcMetadata(byte[] thriftParams) throws ImpalaException { TGetDataSrcsParams params = new TGetDataSrcsParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftParams); TGetDataSrcsResult result = new TGetDataSrcsResult(); List<DataSource> dataSources = frontend_.getDataSrcs(params.pattern); result.setData_src_names(Lists.<String>newArrayListWithCapacity(dataSources.size())); result.setLocations(Lists.<String>newArrayListWithCapacity(dataSources.size())); result.setClass_names(Lists.<String>newArrayListWithCapacity(dataSources.size())); result.setApi_versions(Lists.<String>newArrayListWithCapacity(dataSources.size())); for (DataSource dataSource: dataSources) { result.addToData_src_names(dataSource.getName()); result.addToLocations(dataSource.getLocation()); result.addToClass_names(dataSource.getClassName()); result.addToApi_versions(dataSource.getApiVersion()); } TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } public byte[] getStats(byte[] thriftShowStatsParams) throws ImpalaException { TShowStatsParams params = new TShowStatsParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftShowStatsParams); Preconditions.checkState(params.isSetTable_name()); TResultSet result; if (params.op == TShowStatsOp.COLUMN_STATS) { result = frontend_.getColumnStats(params.getTable_name().getDb_name(), params.getTable_name().getTable_name()); } else { result = frontend_.getTableStats(params.getTable_name().getDb_name(), params.getTable_name().getTable_name(), params.op); } TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns a list of function names matching an optional pattern. * The argument is a serialized TGetFunctionsParams object. 
* The return type is a serialised TGetFunctionsResult object. * @see Frontend#getTableNames */ public byte[] getFunctions(byte[] thriftGetFunctionsParams) throws ImpalaException { TGetFunctionsParams params = new TGetFunctionsParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftGetFunctionsParams); TGetFunctionsResult result = new TGetFunctionsResult(); List<String> signatures = Lists.newArrayList(); List<String> retTypes = Lists.newArrayList(); List<String> fnBinaryTypes = Lists.newArrayList(); List<String> fnIsPersistent = Lists.newArrayList(); List<Function> fns = frontend_.getFunctions(params.category, params.db, params.pattern, false); for (Function fn: fns) { signatures.add(fn.signatureString()); retTypes.add(fn.getReturnType().toString()); fnBinaryTypes.add(fn.getBinaryType().name()); fnIsPersistent.add(String.valueOf(fn.isPersistent())); } result.setFn_signatures(signatures); result.setFn_ret_types(retTypes); result.setFn_binary_types(fnBinaryTypes); result.setFn_persistence(fnIsPersistent); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Gets the thrift representation of a catalog object. */ public byte[] getCatalogObject(byte[] thriftParams) throws ImpalaException, TException { TCatalogObject objectDescription = new TCatalogObject(); JniUtil.deserializeThrift(protocolFactory_, objectDescription, thriftParams); TSerializer serializer = new TSerializer(protocolFactory_); return serializer.serialize( frontend_.getCatalog().getTCatalogObject(objectDescription)); } /** * Returns a database's properties such as its location and comment. * The argument is a serialized TDescribeDbParams object. * The return type is a serialised TDescribeDbResult object. 
* @see Frontend#describeDb */ public byte[] describeDb(byte[] thriftDescribeDbParams) throws ImpalaException { TDescribeDbParams params = new TDescribeDbParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftDescribeDbParams); TDescribeResult result = frontend_.describeDb( params.getDb(), params.getOutput_style()); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns a list of the columns making up a table. * The argument is a serialized TDescribeParams object. * The return type is a serialised TDescribeResult object. * @see Frontend#describeTable */ public byte[] describeTable(byte[] thriftDescribeTableParams) throws ImpalaException { TDescribeTableParams params = new TDescribeTableParams(); JniUtil.deserializeThrift(protocolFactory_, params, thriftDescribeTableParams); Preconditions.checkState(params.isSetTable_name() ^ params.isSetResult_struct()); TDescribeResult result = null; if (params.isSetTable_name()) { result = frontend_.describeTable(params.getTable_name(), params.output_style); } else { Preconditions.checkState(params.output_style == TDescribeOutputStyle.MINIMAL); StructType structType = (StructType)Type.fromThrift(params.result_struct); result = DescribeResultFactory.buildDescribeMinimalResult(structType); } TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns a SQL DDL string for creating the specified table. 
*/
  public String showCreateTable(byte[] thriftTableName) throws ImpalaException {
    TTableName params = new TTableName();
    JniUtil.deserializeThrift(protocolFactory_, params, thriftTableName);
    return ToSqlUtils.getCreateTableSql(frontend_.getCatalog().getTable(
        params.getDb_name(), params.getTable_name()));
  }

  /**
   * Returns a SQL DDL string for creating the specified function.
   */
  public String showCreateFunction(byte[] thriftShowCreateFunctionParams)
      throws ImpalaException {
    TGetFunctionsParams params = new TGetFunctionsParams();
    JniUtil.deserializeThrift(protocolFactory_, params, thriftShowCreateFunctionParams);
    // SHOW CREATE FUNCTION is only defined for scalar and aggregate functions.
    Preconditions.checkArgument(params.category == TFunctionCategory.SCALAR ||
        params.category == TFunctionCategory.AGGREGATE);
    return ToSqlUtils.getCreateFunctionSql(frontend_.getFunctions(
        params.category, params.db, params.pattern, true));
  }

  /**
   * Creates a thrift descriptor table for testing.
   */
  public byte[] buildTestDescriptorTable(byte[] buildTestDescTblParams)
      throws ImpalaException {
    TBuildTestDescriptorTableParams params = new TBuildTestDescriptorTableParams();
    JniUtil.deserializeThrift(protocolFactory_, params, buildTestDescTblParams);
    Preconditions.checkNotNull(params.slot_types);
    TDescriptorTable result = DescriptorTable.buildTestDescriptorTable(params.slot_types);
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      byte[] ret = serializer.serialize(result);
      return ret;
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  /**
   * Gets all roles, either the current/granted roles of the requesting user (or a
   * given grant group), or every role in the catalog.
   */
  public byte[] getRoles(byte[] showRolesParams) throws ImpalaException {
    TShowRolesParams params = new TShowRolesParams();
    JniUtil.deserializeThrift(protocolFactory_, params, showRolesParams);
    TShowRolesResult result = new TShowRolesResult();

    List<Role> roles = Lists.newArrayList();
    if (params.isIs_show_current_roles() || params.isSetGrant_group()) {
      User user = new User(params.getRequesting_user());
      Set<String> groupNames;
      if (params.isIs_show_current_roles()) {
        groupNames =
frontend_.getAuthzChecker().getUserGroups(user);
      } else {
        Preconditions.checkState(params.isSetGrant_group());
        groupNames = Sets.newHashSet(params.getGrant_group());
      }
      // Collect the roles granted to any of the resolved groups.
      for (String groupName: groupNames) {
        roles.addAll(frontend_.getCatalog().getAuthPolicy().getGrantedRoles(groupName));
      }
    } else {
      Preconditions.checkState(!params.isIs_show_current_roles());
      roles = frontend_.getCatalog().getAuthPolicy().getAllRoles();
    }

    result.setRole_names(Lists.<String>newArrayListWithExpectedSize(roles.size()));
    for (Role role: roles) {
      result.getRole_names().add(role.getName());
    }
    // Return role names in a stable, sorted order.
    Collections.sort(result.getRole_names());

    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(result);
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  /**
   * Returns the privileges granted to a role as a serialized TResultSet.
   */
  public byte[] getRolePrivileges(byte[] showGrantRolesParams) throws ImpalaException {
    TShowGrantRoleParams params = new TShowGrantRoleParams();
    JniUtil.deserializeThrift(protocolFactory_, params, showGrantRolesParams);
    TResultSet result = frontend_.getCatalog().getAuthPolicy().getRolePrivileges(
        params.getRole_name(), params.getPrivilege());
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(result);
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  /**
   * Executes a HiveServer2 metadata operation and returns a TResultSet
   */
  public byte[] execHiveServer2MetadataOp(byte[] metadataOpsParams)
      throws ImpalaException {
    TMetadataOpRequest params = new TMetadataOpRequest();
    JniUtil.deserializeThrift(protocolFactory_, params, metadataOpsParams);
    TResultSet result = frontend_.execHiveServer2MetadataOp(params);
    TSerializer serializer = new TSerializer(protocolFactory_);
    try {
      return serializer.serialize(result);
    } catch (TException e) {
      throw new InternalException(e.getMessage());
    }
  }

  // Marks the local catalog as fully initialized and ready to serve requests.
  public void setCatalogInitialized() {
    frontend_.getCatalog().setIsReady(true);
  }

  // Caching this saves ~50ms per call to getHadoopConfigAsHtml
  private
static final Configuration CONF = new Configuration(); /** * Returns a string of all loaded Hadoop configuration parameters as a table of keys * and values. If asText is true, output in raw text. Otherwise, output in html. */ public byte[] getAllHadoopConfigs() throws ImpalaException { Map<String, String> configs = Maps.newHashMap(); for (Map.Entry<String, String> e: CONF) { configs.put(e.getKey(), e.getValue()); } TGetAllHadoopConfigsResponse result = new TGetAllHadoopConfigsResponse(); result.setConfigs(configs); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns the corresponding config value for the given key as a serialized * TGetHadoopConfigResponse. If the config value is null, the 'value' field in the * thrift response object will not be set. */ public byte[] getHadoopConfig(byte[] serializedRequest) throws ImpalaException { TGetHadoopConfigRequest request = new TGetHadoopConfigRequest(); JniUtil.deserializeThrift(protocolFactory_, request, serializedRequest); TGetHadoopConfigResponse result = new TGetHadoopConfigResponse(); result.setValue(CONF.get(request.getName())); TSerializer serializer = new TSerializer(protocolFactory_); try { return serializer.serialize(result); } catch (TException e) { throw new InternalException(e.getMessage()); } } /** * Returns an error string describing all configuration issues. If no config issues are * found, returns an empty string. */ public String checkConfiguration() { StringBuilder output = new StringBuilder(); output.append(checkLogFilePermission()); output.append(checkFileSystem(CONF)); output.append(checkShortCircuitRead(CONF)); return output.toString(); } /** * Returns an empty string if Impala has permission to write to FE log files. If not, * returns an error string describing the issues. 
*/ private String checkLogFilePermission() { org.apache.log4j.Logger l4jRootLogger = org.apache.log4j.Logger.getRootLogger(); Enumeration appenders = l4jRootLogger.getAllAppenders(); while (appenders.hasMoreElements()) { Appender appender = (Appender) appenders.nextElement(); if (appender instanceof FileAppender) { if (((FileAppender) appender).getFile() == null) { // If Impala does not have permission to write to the log file, the // FileAppender will fail to initialize and logFile will be null. // Unfortunately, we can't get the log file name here. return "Impala does not have permission to write to the log file specified " + "in log4j.properties."; } } } return ""; } /** * Returns an error message if short circuit reads are enabled but misconfigured. * Otherwise, returns an empty string, */ private String checkShortCircuitRead(Configuration conf) { if (!conf.getBoolean(DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_DEFAULT)) { LOG.info("Short-circuit reads are not enabled."); return ""; } StringBuilder output = new StringBuilder(); String errorMessage = "Invalid short-circuit reads configuration:\n"; String prefix = " - "; StringBuilder errorCause = new StringBuilder(); // dfs.domain.socket.path must be set properly String domainSocketPath = conf.getTrimmed(DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY, DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_DEFAULT); if (domainSocketPath.isEmpty()) { errorCause.append(prefix); errorCause.append(DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY); errorCause.append(" is not configured.\n"); } else { // The socket path parent directory must be readable and executable. 
File socketFile = new File(domainSocketPath); File socketDir = socketFile.getParentFile(); if (socketDir == null || !socketDir.canRead() || !socketDir.canExecute()) { errorCause.append(prefix); errorCause.append("Impala cannot read or execute the parent directory of "); errorCause.append(DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY); errorCause.append("\n"); } } // dfs.client.use.legacy.blockreader.local must be set to false if (conf.getBoolean(DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL, DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT)) { errorCause.append(prefix); errorCause.append(DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL); errorCause.append(" should not be enabled.\n"); } if (errorCause.length() > 0) { output.append(errorMessage); output.append(errorCause); } return output.toString(); } /** * Return an empty string if the default FileSystem configured in CONF refers to a * DistributedFileSystem and Impala can list the root directory "/". Otherwise, * return an error string describing the issues. */ private String checkFileSystem(Configuration conf) { try { FileSystem fs = FileSystem.get(CONF); if (!(fs instanceof DistributedFileSystem || fs instanceof S3AFileSystem)) { return "Currently configured default filesystem: " + fs.getClass().getSimpleName() + ". " + CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY + " (" + CONF.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY) + ")" + " is not supported."; } } catch (IOException e) { return "couldn't retrieve FileSystem:\n" + e.getMessage(); } try { FileSystemUtil.getTotalNumVisibleFiles(new Path("/")); } catch (IOException e) { return "Could not read the root directory at " + CONF.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY) + ". Error was: \n" + e.getMessage(); } return ""; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.kafka.bolt; import com.google.common.collect.ImmutableList; import java.lang.reflect.Field; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.List; import java.util.Properties; import java.util.concurrent.Future; import kafka.api.FetchRequest; import kafka.api.OffsetRequest; import kafka.javaapi.FetchResponse; import kafka.javaapi.OffsetResponse; import kafka.javaapi.consumer.SimpleConsumer; import kafka.javaapi.message.ByteBufferMessageSet; import kafka.message.Message; import kafka.message.MessageAndOffset; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.storm.Config; import org.apache.storm.Constants; import org.apache.storm.kafka.Broker; import org.apache.storm.kafka.BrokerHosts; import org.apache.storm.kafka.KafkaConfig; import org.apache.storm.kafka.KafkaTestBroker; import org.apache.storm.kafka.KafkaUtils; import org.apache.storm.kafka.Partition; import org.apache.storm.kafka.StaticHosts; import org.apache.storm.kafka.trident.GlobalPartitionInformation; import org.apache.storm.task.GeneralTopologyContext; import 
org.apache.storm.task.IOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.TupleImpl;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.TupleUtils;
import org.apache.storm.utils.Utils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Integration-style tests for {@code KafkaBolt}: each test writes a tuple through the
 * bolt into an in-process Kafka broker ({@code KafkaTestBroker}) and then reads the
 * message back through a {@code SimpleConsumer} to verify key and payload.
 * Async/fire-and-forget paths substitute a mocked {@code KafkaProducer}.
 */
public class KafkaBoltTest {

    private static final String TEST_TOPIC = "test-topic";
    private KafkaTestBroker broker;
    private KafkaBolt bolt;
    private Config config = new Config();
    private KafkaConfig kafkaConfig;
    private SimpleConsumer simpleConsumer;

    // Mocked collector so ack/fail calls can be verified without a real topology.
    @Mock
    private IOutputCollector collector;

    /**
     * Builds a mocked ByteBufferMessageSet that yields exactly one message with the
     * given key and payload bytes. Used when the real broker is replaced by mocks.
     */
    private static ByteBufferMessageSet mockSingleMessage(byte[] key, byte[] message) {
        ByteBufferMessageSet sets = mock(ByteBufferMessageSet.class);
        MessageAndOffset msg = mock(MessageAndOffset.class);
        final List<MessageAndOffset> msgs = ImmutableList.of(msg);
        doReturn(msgs.iterator()).when(sets).iterator();
        Message kafkaMessage = mock(Message.class);
        doReturn(ByteBuffer.wrap(key)).when(kafkaMessage).key();
        doReturn(ByteBuffer.wrap(message)).when(kafkaMessage).payload();
        doReturn(kafkaMessage).when(msg).message();
        return sets;
    }

    /**
     * Builds a mocked SimpleConsumer whose fetch() always returns the supplied
     * message set, so verifyMessage() can run without a live broker.
     */
    private static SimpleConsumer mockSimpleConsumer(ByteBufferMessageSet mockMsg) {
        SimpleConsumer simpleConsumer = mock(SimpleConsumer.class);
        FetchResponse resp = mock(FetchResponse.class);
        doReturn(resp).when(simpleConsumer).fetch(any(FetchRequest.class));
        OffsetResponse mockOffsetResponse = mock(OffsetResponse.class);
        doReturn(new long[]{}).when(mockOffsetResponse).offsets(anyString(), anyInt());
        doReturn(mockOffsetResponse).when(simpleConsumer).getOffsetsBefore(any(kafka.javaapi.OffsetRequest.class));
        doReturn(mockMsg).when(resp).messageSet(anyString(), anyInt());
        return simpleConsumer;
    }

    // Starts a fresh broker, consumer and string-serializer bolt before each test.
    @Before
    public void initMocks() {
        MockitoAnnotations.initMocks(this);
        broker = new KafkaTestBroker();
        setupKafkaConsumer();
        config.put(KafkaBolt.TOPIC, TEST_TOPIC);
        bolt = generateStringSerializerBolt();
    }

    // Tears down consumer, broker and bolt after each test.
    @After
    public void shutdown() {
        simpleConsumer.close();
        broker.shutdown();
        bolt.cleanup();
    }

    // Points a SimpleConsumer at partition 0 of the embedded broker's test topic.
    private void setupKafkaConsumer() {
        GlobalPartitionInformation globalPartitionInformation = new GlobalPartitionInformation(TEST_TOPIC);
        globalPartitionInformation.addPartition(0, Broker.fromString(broker.getBrokerConnectionString()));
        BrokerHosts brokerHosts = new StaticHosts(globalPartitionInformation);
        kafkaConfig = new KafkaConfig(brokerHosts, TEST_TOPIC);
        simpleConsumer = new SimpleConsumer("localhost", broker.getPort(), 60000, 1024, "testClient");
    }

    @Test
    public void shouldNotAcknowledgeTickTuples() throws Exception {
        // Given
        Tuple tickTuple = mockTickTuple();

        // When
        bolt.execute(tickTuple);

        // Then: tick tuples must pass through without being acked.
        verify(collector, never()).ack(tickTuple);
    }

    @Test
    public void executeWithKey() throws Exception {
        String message = "value-123";
        String key = "key-123";
        Tuple tuple = generateTestTuple(key, message);
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(key, message);
    }

    /* test synchronous sending */
    @Test
    public void executeWithByteArrayKeyAndMessageSync() throws Exception {
        boolean async = false;
        boolean fireAndForget = false;
        bolt = generateDefaultSerializerBolt(async, fireAndForget, null);
        String keyString = "test-key";
        String messageString = "test-message";
        byte[] key = keyString.getBytes();
        byte[] message = messageString.getBytes();
        Tuple tuple = generateTestTuple(key, message);
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(keyString, messageString);
    }

    /* test asynchronous sending (default) */
    @Test
    public void executeWithByteArrayKeyAndMessageAsync() throws Exception {
        boolean async = true;
        boolean fireAndForget = false;
        String keyString = "test-key";
        String messageString = "test-message";
        byte[] key = keyString.getBytes();
        byte[] message = messageString.getBytes();
        final Tuple tuple = generateTestTuple(key, message);

        final ByteBufferMessageSet mockMsg = mockSingleMessage(key, message);
        simpleConsumer.close();
        simpleConsumer = mockSimpleConsumer(mockMsg);
        KafkaProducer<?, ?> producer = mock(KafkaProducer.class);
        // Immediately invoke the send() callback so the async ack path is exercised.
        when(producer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Future>() {
            @Override
            public Future answer(InvocationOnMock invocationOnMock) throws Throwable {
                Callback cb = (Callback) invocationOnMock.getArguments()[1];
                cb.onCompletion(null, null);
                return mock(Future.class);
            }
        });
        bolt = generateDefaultSerializerBolt(async, fireAndForget, producer);
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(keyString, messageString);
    }

    /* test with fireAndForget option enabled */
    @Test
    public void executeWithByteArrayKeyAndMessageFire() throws Exception {
        boolean async = true;
        boolean fireAndForget = true;
        bolt = generateDefaultSerializerBolt(async, fireAndForget, null);
        String keyString = "test-key";
        String messageString = "test-message";
        byte[] key = keyString.getBytes();
        byte[] message = messageString.getBytes();
        Tuple tuple = generateTestTuple(key, message);
        final ByteBufferMessageSet mockMsg = mockSingleMessage(key, message);
        simpleConsumer.close();
        simpleConsumer = mockSimpleConsumer(mockMsg);
        KafkaProducer<?, ?> producer = mock(KafkaProducer.class);
        // do not invoke the callback of send() in order to test whether the bolt handle the fireAndForget option
        // properly.
        doReturn(mock(Future.class)).when(producer).send(any(ProducerRecord.class), any(Callback.class));
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(keyString, messageString);
    }

    /* test bolt specified properties */
    @Test
    public void executeWithBoltSpecifiedProperties() {
        boolean async = false;
        boolean fireAndForget = false;
        bolt = defaultSerializerBoltWithSpecifiedProperties(async, fireAndForget);
        String keyString = "test-key";
        String messageString = "test-message";
        byte[] key = keyString.getBytes();
        byte[] message = messageString.getBytes();
        Tuple tuple = generateTestTuple(key, message);
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(keyString, messageString);
    }

    // Builds a synchronous bolt that serializes keys and values as Strings.
    private KafkaBolt generateStringSerializerBolt() {
        Properties props = new Properties();
        props.put("acks", "1");
        props.put("bootstrap.servers", broker.getBrokerConnectionString());
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("metadata.fetch.timeout.ms", 1000);
        KafkaBolt bolt = new KafkaBolt().withProducerProperties(props);
        bolt.prepare(config, null, new OutputCollector(collector));
        bolt.setAsync(false);
        return bolt;
    }

    /**
     * Builds a byte-array-serializer bolt; when mockProducer is non-null it is
     * injected (via reflection) in place of the real producer.
     */
    private KafkaBolt generateDefaultSerializerBolt(boolean async, boolean fireAndForget,
                                                    KafkaProducer<?, ?> mockProducer) throws Exception {
        Properties props = new Properties();
        props.put("acks", "1");
        props.put("bootstrap.servers", broker.getBrokerConnectionString());
        props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put("metadata.fetch.timeout.ms", 1000);
        props.put("linger.ms", 0);
        KafkaBolt bolt = new KafkaBolt().withProducerProperties(props);
        bolt.prepare(config, null, new OutputCollector(collector));
        bolt.setAsync(async);
        bolt.setFireAndForget(fireAndForget);
        if (mockProducer != null) {
            // Replace the private "producer" field created by prepare() with the mock.
            Field producerField = bolt.getClass().getDeclaredField("producer");
            producerField.setAccessible(true);
            producerField.set(bolt, mockProducer);
        }
        return bolt;
    }

    // Like generateDefaultSerializerBolt but never injects a mock producer.
    private KafkaBolt defaultSerializerBoltWithSpecifiedProperties(boolean async, boolean fireAndForget) {
        Properties props = new Properties();
        props.put("acks", "1");
        props.put("bootstrap.servers", broker.getBrokerConnectionString());
        props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put("metadata.fetch.timeout.ms", 1000);
        props.put("linger.ms", 0);
        KafkaBolt bolt = new KafkaBolt().withProducerProperties(props);
        bolt.prepare(config, null, new OutputCollector(collector));
        bolt.setAsync(async);
        bolt.setFireAndForget(fireAndForget);
        return bolt;
    }

    @Test
    public void executeWithoutKey() throws Exception {
        String message = "value-234";
        Tuple tuple = generateTestTuple(message);
        bolt.execute(tuple);
        verify(collector).ack(tuple);
        verifyMessage(null, message);
    }

    @Test
    public void executeWithBrokerDown() throws Exception {
        broker.shutdown();
        String message = "value-234";
        Tuple tuple = generateTestTuple(message);
        bolt.execute(tuple);
        // With no broker available the tuple must be failed, not acked.
        verify(collector).fail(tuple);
    }

    /**
     * Fetches the most recent message on partition 0 and asserts its key and payload
     * match the expected values (key may be null for keyless messages).
     */
    private boolean verifyMessage(String key, String message) {
        long lastMessageOffset = KafkaUtils.getOffset(simpleConsumer, kafkaConfig.topic, 0, OffsetRequest.LatestTime()) - 1;
        ByteBufferMessageSet messageAndOffsets = KafkaUtils.fetchMessages(kafkaConfig, simpleConsumer,
                new Partition(Broker.fromString(broker.getBrokerConnectionString()), kafkaConfig.topic, 0), lastMessageOffset);
        MessageAndOffset messageAndOffset = messageAndOffsets.iterator().next();
        Message kafkaMessage = messageAndOffset.message();
        ByteBuffer messageKeyBuffer = kafkaMessage.key();
        String keyString = null;
        String messageString = new String(Utils.toByteArray(kafkaMessage.payload()));
        if (messageKeyBuffer != null) {
            keyString = new String(Utils.toByteArray(messageKeyBuffer));
        }
        assertEquals(key, keyString);
        assertEquals(message, messageString);
        return true;
    }

    // Builds a two-field ("key", "message") tuple backed by a minimal topology context.
    private Tuple generateTestTuple(Object key, Object message) {
        TopologyBuilder builder = new TopologyBuilder();
        GeneralTopologyContext topologyContext =
                new GeneralTopologyContext(builder.createTopology(), new Config(), new HashMap<>(), new HashMap<>(), new HashMap<>(), "") {
                    @Override
                    public Fields getComponentOutputFields(String componentId, String streamId) {
                        return new Fields("key", "message");
                    }
                };
        return new TupleImpl(topologyContext, new Values(key, message), topologyContext.getComponentId(1), 1, "");
    }

    // Builds a single-field ("message") tuple backed by a minimal topology context.
    private Tuple generateTestTuple(Object message) {
        TopologyBuilder builder = new TopologyBuilder();
        GeneralTopologyContext topologyContext =
                new GeneralTopologyContext(builder.createTopology(), new Config(), new HashMap<>(), new HashMap<>(), new HashMap<>(), "") {
                    @Override
                    public Fields getComponentOutputFields(String componentId, String streamId) {
                        return new Fields("message");
                    }
                };
        return new TupleImpl(topologyContext, new Values(message), topologyContext.getComponentId(1), 1, "");
    }

    // Mocks a system tick tuple (system component + tick stream id).
    private Tuple mockTickTuple() {
        Tuple tuple = mock(Tuple.class);
        when(tuple.getSourceComponent()).thenReturn(Constants.SYSTEM_COMPONENT_ID);
        when(tuple.getSourceStreamId()).thenReturn(Constants.SYSTEM_TICK_STREAM_ID);
        // Sanity check
        assertTrue(TupleUtils.isTick(tuple));
        return tuple;
    }
}
package com.midi_automator.tests.functional;

import static com.midi_automator.tests.utils.GUIAutomations.addAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.automationsDelayCell;
import static com.midi_automator.tests.utils.GUIAutomations.cancelDialog;
import static com.midi_automator.tests.utils.GUIAutomations.cancelMidiLearnAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.clickAutomationMovableCheckBox;
import static com.midi_automator.tests.utils.GUIAutomations.clickNextFile;
import static com.midi_automator.tests.utils.GUIAutomations.deleteAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.getFileList;
import static com.midi_automator.tests.utils.GUIAutomations.getGUIAutomationTable;
import static com.midi_automator.tests.utils.GUIAutomations.midiLearnAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.midiUnLearnAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.moveUpEntry;
import static com.midi_automator.tests.utils.GUIAutomations.openAddDialog;
import static com.midi_automator.tests.utils.GUIAutomations.openEntryByDoubleClick;
import static com.midi_automator.tests.utils.GUIAutomations.openPreferences;
import static com.midi_automator.tests.utils.GUIAutomations.openScreenshotFileChooser;
import static com.midi_automator.tests.utils.GUIAutomations.removeScreenshotFromAutomation;
import static com.midi_automator.tests.utils.GUIAutomations.robot;
import static com.midi_automator.tests.utils.GUIAutomations.saveDialog;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationMinDelay;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationMinSimilarity;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationScanRate;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationTimeout;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationTrigger;
import static com.midi_automator.tests.utils.GUIAutomations.setAutomationType;
import static com.midi_automator.tests.utils.GUIAutomations.spinDownAutomationDelaySpinner;
import static com.midi_automator.tests.utils.GUIAutomations.spinUpAutomationDelaySpinner;
import static org.junit.Assert.fail;

import java.awt.Point;
import java.io.File;

import javax.sound.midi.InvalidMidiDataException;
import javax.sound.midi.MidiUnavailableException;
import javax.sound.midi.ShortMessage;

import org.assertj.swing.data.TableCell;
import org.assertj.swing.exception.UnexpectedException;
import org.assertj.swing.fixture.DialogFixture;
import org.assertj.swing.fixture.JFileChooserFixture;
import org.assertj.swing.fixture.JPopupMenuFixture;
import org.assertj.swing.fixture.JTableFixture;
import org.junit.Test;

import com.midi_automator.Messages;
import com.midi_automator.guiautomator.GUIAutomation;
import com.midi_automator.tests.utils.MockUpUtils;
import com.midi_automator.utils.MidiUtils;
import com.midi_automator.view.windows.MainFrame.menus.MainFramePopupMenu;
import com.midi_automator.view.windows.PreferencesDialog.GUIAutomationPanel.GUIAutomationTable.GUIAutomationTable;

/**
 * Functional (GUI-driven) integration tests for the GUI automation feature:
 * automations are configured through the preferences dialog, triggered by MIDI
 * messages or timers, and their effects (clicked dialogs, opened files, popup
 * menus) are asserted with AssertJ-Swing fixtures.
 *
 * NOTE(review): the long Thread.sleep() waits appear to be deliberate pacing for
 * screen-capture based automations; timings are environment-sensitive.
 */
public class GUIAutomationFunctionalITCase extends FunctionalBaseCase {

	// MIDI device and mockup property files; chosen per OS in the constructor.
	private String deviceName;
	private String propertiesAlwaysCancelAutomation;
	private String propertiesMidiCancelAutomation;
	private String propertiesMidiFullMainFrameAutomation;
	private String propertiesMidiHelloWorldAutomation;
	private String propertiesOnceMainFrame;
	private String propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation;
	private String propertiesAutomationMidiLearned;

	// MIDI message used to trigger automations in the tests below.
	private int messageType = ShortMessage.CONTROL_CHANGE;
	private int midiChannel = 1;
	private int midiControlNo = 109;
	private int midiValue = 127;

	// Selects OS-specific MIDI device name and mockup property files.
	public GUIAutomationFunctionalITCase() {
		if (System.getProperty("os.name").equals("Mac OS X")) {
			deviceName = "Bus 1";
			propertiesAlwaysCancelAutomation = "automation_cancel_always_left_Mac.properties";
			propertiesMidiHelloWorldAutomation = "automation_hello_world_1_midi_left_Mac.properties";
			propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation = "automation_popup_and_cancel_Mac.properties";
			propertiesMidiCancelAutomation = "automation_cancel_midi_left_Mac.properties";
			propertiesMidiFullMainFrameAutomation = "automation_midi_automator_midi_left_Mac.properties";
			propertiesOnceMainFrame = "automation_main_frame_once_left_Mac.properties";
			propertiesAutomationMidiLearned = "automation_cancel_midi_learned_left_Mac.properties";
		}

		if (System.getProperty("os.name").contains("Windows")) {
			deviceName = "LoopBe Internal MIDI";
			propertiesAlwaysCancelAutomation = "automation_cancel_always_left_Windows"
					+ ".properties";
			propertiesMidiHelloWorldAutomation = "automation_hello_world_1_midi_left_Windows.properties";
			propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation = "automation_popup_and_cancel_Windows.properties";
			propertiesMidiCancelAutomation = "automation_cancel_midi_left_Windows.properties";
			propertiesMidiFullMainFrameAutomation = "automation_midi_automator_midi_left_Windows.properties";
			propertiesOnceMainFrame = "automation_main_frame_once_left_Windows.properties";
			propertiesAutomationMidiLearned = "automation_cancel_midi_learned_left_Windows.properties";
		}
	}

	@Test
	public void minMaxSimilarMidiAutomatorShouldBeClicked() {
		MockUpUtils.setMockupPropertiesFile("mockups/"
				+ propertiesMidiFullMainFrameAutomation);
		MockUpUtils.setMockupMidoFile("mockups/full_list.mido");
		startApplication();

		try {
			// send midi trigger
			MidiUtils.sendMidiMessage(deviceName, messageType, midiChannel,
					midiControlNo, midiValue);
			Thread.sleep(8000);

			// search clicked Midi Automator
			getFileList().requireSelectedItems(6);

			// change view of Midi Automator
			moveUpEntry(5);

			// decrease similarity
			DialogFixture preferencesDialog = openPreferences();
			setAutomationMinSimilarity("0.5", preferencesDialog);
			saveDialog(preferencesDialog);
			Thread.sleep(1000);

			// send midi trigger
			MidiUtils.sendMidiMessage(deviceName, messageType, midiChannel,
					midiControlNo, midiValue);
			Thread.sleep(5000);

			// search clicked Midi Automator
			getFileList().requireSelectedItems(7);
		} catch (InterruptedException | InvalidMidiDataException
				| MidiUnavailableException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void newAutomationShouldBeAdded() {
		MockUpUtils.setMockupPropertiesFile("mockups/empty.properties");
		MockUpUtils.setMockupMidoFile("mockups/empty.mido");
		startApplication();

		DialogFixture preferencesDialog = openPreferences();
		addAutomation(preferencesDialog);

		JTableFixture table = getGUIAutomationTable(preferencesDialog);
		table.requireRowCount(1);
	}

	@Test
	public void automationShouldBeDeleted() {
		MockUpUtils
				.setMockupPropertiesFile("mockups/automation1_empty.properties");
		MockUpUtils.setMockupMidoFile("mockups/empty.mido");
		startApplication();

		DialogFixture preferencesDialog = openPreferences();
		deleteAutomation(0, preferencesDialog);

		JTableFixture table = getGUIAutomationTable(preferencesDialog);
		table.requireRowCount(0);
	}

	@Test
	public void addDialogShouldAlwaysBeCanceled() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAlwaysCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// check if add dialog was canceled
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(6000);
			addDialog.requireNotVisible();

			// check if add dialog was canceled again
			addDialog = openAddDialog();
			Thread.sleep(6000);
			addDialog.requireNotVisible();

			// check if add dialog was canceled again
			addDialog = openAddDialog();
			Thread.sleep(6000);
			addDialog.requireNotVisible();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void addDialogShouldBeCanceledOnce() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAlwaysCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/Hello_World_12_empty.mido");
			startApplication();

			// set trigger to once
			DialogFixture preferencesDialog = openPreferences();
			setAutomationTrigger(GUIAutomation.TRIGGER_ONCE, 0,
					preferencesDialog);
			saveDialog(preferencesDialog);

			// check if add dialog was canceled
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireNotVisible();

			// check if add dialog was not canceled again
			addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();
			cancelDialog(addDialog);

			// check if add dialog was not canceled after opening
			clickNextFile();
			addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();
			cancelDialog(addDialog);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void addDialogShouldBeCanceledOncePerOpening() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAlwaysCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/Hello_World_12_empty.mido");
			startApplication();

			// set trigger to once per opening
			DialogFixture preferencesDialog = openPreferences();
			setAutomationTrigger(GUIAutomation.TRIGGER_ONCE_PER_CHANGE, 0,
					preferencesDialog);
			saveDialog(preferencesDialog);

			// check if add dialog was not canceled before opening
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();
			cancelDialog(addDialog);

			// check if add dialog was canceled after opening
			clickNextFile();
			addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireNotVisible();

			// check if add dialog was canceled twice after opening
			addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();
			cancelDialog(addDialog);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	// @Test
	// Blocked due to
	// https://github.com/joel-costigliola/assertj-swing/issues/169"
	public void automationMidiLearnShouldBeCanceled() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesMidiCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// cancel midi learn
			DialogFixture preferencesDialog = openPreferences();
			midiLearnAutomation(0, preferencesDialog);
			Thread.sleep(1000);
			cancelMidiLearnAutomation(0, preferencesDialog);

			// check for empty midi message
			JTableFixture table = getGUIAutomationTable(preferencesDialog);
			int column = table
					.columnIndexFor(GUIAutomationTable.COLNAME_MIDI_SIGNATURE);
			table.requireCellValue(TableCell.row(0).column(column), "");
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void automationMidiShouldBeUnlearned() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAutomationMidiLearned);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// unlearn midi
			DialogFixture preferencesDialog = openPreferences();
			midiUnLearnAutomation(0, preferencesDialog);
			Thread.sleep(1000);

			// check for empty midi message
			JTableFixture table = getGUIAutomationTable(preferencesDialog);
			int column = table
					.columnIndexFor(GUIAutomationTable.COLNAME_MIDI_SIGNATURE);
			table.requireCellValue(TableCell.row(0).column(column), "");
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void addDialogShouldBeCanceledOnceByMidi() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesMidiCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// midi learn automation
			DialogFixture preferencesDialog = openPreferences();
			midiLearnAutomation(0, preferencesDialog);
			Thread.sleep(1000);
			MidiUtils.sendMidiMessage(deviceName, messageType, midiChannel,
					midiControlNo, midiValue);
			Thread.sleep(2000);

			// check for learned midi message
			JTableFixture table = getGUIAutomationTable(preferencesDialog);
			int column = table
					.columnIndexFor(GUIAutomationTable.COLNAME_MIDI_SIGNATURE);
			table.requireCellValue(TableCell.row(0).column(column),
					"channel 1: CONTROL CHANGE 109 value: 127");
			saveDialog(preferencesDialog);

			// check if add dialog was canceled by some other trigger
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();

			// send midi trigger
			MidiUtils.sendMidiMessage(deviceName, messageType, midiChannel,
					midiControlNo, midiValue);
			Thread.sleep(5000);

			// check if add dialog was canceled
			addDialog.requireNotVisible();
		} catch (InterruptedException | InvalidMidiDataException
				| MidiUnavailableException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void addDialogShallBeCanceledWithDelay() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAlwaysCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// open preferences
			DialogFixture preferencesDialog = openPreferences();

			// set delay
			setAutomationMinDelay("6000", 0, preferencesDialog);
			saveDialog(preferencesDialog);

			// check if add dialog was canceled before delay
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();

			// check if add dialog was canceled after delay
			Thread.sleep(5000);
			addDialog.requireNotVisible();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void addDialogShallNotBeCanceledAfterTimeout() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesAlwaysCancelAutomation);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// open preferences
			DialogFixture preferencesDialog = openPreferences();

			// set delay
			setAutomationTimeout("10000", 0, preferencesDialog);
			saveDialog(preferencesDialog);

			// check if add dialog was canceled before time out
			DialogFixture addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireNotVisible();

			// check if add dialog was not canceled after time out
			Thread.sleep(6000);
			addDialog = openAddDialog();
			Thread.sleep(5000);
			addDialog.requireVisible();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void popUpMenuShouldBeOpenedOnce() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesOnceMainFrame);
			MockUpUtils.setMockupMidoFile("mockups/empty.mido");
			startApplication();

			// set type to once right click
			DialogFixture preferencesDialog = openPreferences();
			setAutomationType(GUIAutomation.CLICKTYPE_RIGHT, 0,
					preferencesDialog);
			saveDialog(preferencesDialog);
			Thread.sleep(1000);

			// check if popup menu appears
			Thread.sleep(5000);
			JPopupMenuFixture popupMenu = new JPopupMenuFixture(robot,
					ctx.getBean(MainFramePopupMenu.class));
			popupMenu.requireVisible();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void fileShouldBeOpenedByDoubleClickOnce() {
		try {
			MockUpUtils.setMockupPropertiesFile("mockups/"
					+ propertiesMidiHelloWorldAutomation);
			MockUpUtils
					.setMockupMidoFile("mockups/Hello_World_12_no_file.mido");
			startApplication();

			// set trigger to double click once
			DialogFixture preferencesDialog = openPreferences();
			setAutomationType(GUIAutomation.CLICKTYPE_DOUBLE, 0,
					preferencesDialog);
			setAutomationTrigger(GUIAutomation.TRIGGER_ONCE, 0,
					preferencesDialog);
			setAutomationMinDelay("1000", 0, preferencesDialog);
			saveDialog(preferencesDialog);
			window.focus();

			// check if file was opened
			Thread.sleep(5000);
			checkInfoText(String.format(Messages.MSG_FILE_LIST_NOT_FOUND,
					"./testfiles/Hello World_no_file.rtf"));
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Test
	public void delaySpinnerShouldNotSpinBelow0() {
		MockUpUtils
				.setMockupPropertiesFile("mockups/automation1_empty.properties");
		MockUpUtils.setMockupMidoFile("mockups/empty.mido");
		startApplication();

		DialogFixture preferencesDialog = openPreferences();

		// spin up two times
		spinUpAutomationDelaySpinner(2, 0, preferencesDialog);

		// check for delay = 2
		JTableFixture table = getGUIAutomationTable(preferencesDialog);
		table.requireCellValue(automationsDelayCell(0, preferencesDialog), "2");

		// spin down three times
		spinDownAutomationDelaySpinner(3, 0, preferencesDialog);

		// check for delay = 0
		table.requireCellValue(automationsDelayCell(0, preferencesDialog), "0");
	}

	@Test
	public void delayShouldNotTakeInvalidValues() {
		MockUpUtils
				.setMockupPropertiesFile("mockups/automation1_empty.properties");
MockUpUtils.setMockupMidoFile("mockups/empty.mido"); startApplication(); DialogFixture preferencesDialog = openPreferences(); // set delay negative // change keyboard layout to EN to be sure this is working properly try { setAutomationMinDelay("-1000", 0, preferencesDialog); } catch (UnexpectedException e) { } saveDialog(preferencesDialog); preferencesDialog = openPreferences(); // check for delay = 0 JTableFixture table = getGUIAutomationTable(preferencesDialog); table.click(); table.requireCellValue(automationsDelayCell(0, preferencesDialog), "0"); // set delay nonsense try { setAutomationMinDelay("$%Ghg12", 0, preferencesDialog); } catch (UnexpectedException e) { } saveDialog(preferencesDialog); preferencesDialog = openPreferences(); // check for delay = 0 table = getGUIAutomationTable(preferencesDialog); table.click(); table.requireCellValue(automationsDelayCell(0, preferencesDialog), "0"); } @Test public void multipleAutomationsShouldBeRun() { try { MockUpUtils .setMockupPropertiesFile("mockups/" + propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation); MockUpUtils.setMockupMidoFile("mockups/Hello_World_12_empty.mido"); startApplication(); // check if dialogs are always canceled DialogFixture addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); // check if popup menu is opened after file opening openEntryByDoubleClick(1); Thread.sleep(5000); JPopupMenuFixture popupMenu = new JPopupMenuFixture(robot, ctx.getBean(MainFramePopupMenu.class)); popupMenu.requireVisible(); // check if dialogs are always canceled addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); } catch (InterruptedException e) { e.printStackTrace(); } } @Test public void movableVsNonMovableAutomation() { try { MockUpUtils.setMockupPropertiesFile("mockups/" + propertiesAlwaysCancelAutomation); MockUpUtils.setMockupMidoFile("mockups/empty.mido"); startApplication(); // check if add dialog was canceled unmoved DialogFixture addDialog = 
openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); // move cancel button window.moveTo(new Point(500, 200)); // check if dialog was not canceled addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireVisible(); cancelDialog(addDialog); // activate movable DialogFixture preferencesDialog = openPreferences(); clickAutomationMovableCheckBox(0, preferencesDialog); saveDialog(preferencesDialog); // check if add dialog was canceled unmoved addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); // move cancel button window.moveTo(new Point(10, 10)); // check if add dialog was canceled unmoved addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); } catch (InterruptedException e) { e.printStackTrace(); } } @Test public void scanRateShouldBeSet() { try { MockUpUtils .setMockupPropertiesFile("mockups/" + propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation); MockUpUtils.setMockupMidoFile("mockups/Hello_World_12_empty.mido"); startApplication(); // check if add dialog was canceled within 5 seconds DialogFixture addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); // set scan rate to 0.1 DialogFixture preferencesDialog = openPreferences(); setAutomationScanRate("0.1", 0, preferencesDialog); clickAutomationMovableCheckBox(0, preferencesDialog); saveDialog(preferencesDialog); // check if add dialog was canceled after 10 seconds addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireVisible(); Thread.sleep(5000); addDialog.requireNotVisible(); } catch (InterruptedException e) { e.printStackTrace(); } } @Test public void storedAutomationsShouldBeActivatedAfterpreferencesDialogWasCanceled() { try { MockUpUtils .setMockupPropertiesFile("mockups/" + propertiesOncePerOpeningHelloWorld1PopupAndAlwaysCancelAutomation); MockUpUtils.setMockupMidoFile("mockups/Hello_World_12_empty.mido"); startApplication(); // check if add dialog was canceled DialogFixture 
addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); // open and cancel preferences DialogFixture preferencesDialog = openPreferences(); cancelDialog(preferencesDialog); // check if add dialog was canceled addDialog = openAddDialog(); Thread.sleep(5000); addDialog.requireNotVisible(); } catch (InterruptedException e) { e.printStackTrace(); } } @Test public void screenshotChooserShouldRememberLastDirectory() { MockUpUtils.setMockupMidoFile("mockups/empty.mido"); MockUpUtils .setMockupPropertiesFile("mockups/automation1_empty.properties"); startApplication(); DialogFixture preferencesDialog = openPreferences(); JFileChooserFixture fileChooser = openScreenshotFileChooser(0, preferencesDialog); // choose a file fileChooser.setCurrentDirectory(new File(currentPath + File.separator + "testfiles")); String cancelButtonImage1 = currentPath + File.separator + "testfiles" + File.separator + "cancel_button.png"; File cancelButtonImageFile1 = new File(cancelButtonImage1); fileChooser.selectFile(cancelButtonImageFile1); fileChooser.approve(); saveDialog(preferencesDialog); // re-select file preferencesDialog = openPreferences(); fileChooser = openScreenshotFileChooser(0, preferencesDialog); String cancelButtonImage2 = currentPath + File.separator + "testfiles" + File.separator + "cancel_button2.png"; File cancelButtonImageFile2 = new File(cancelButtonImage2); fileChooser.selectFile(cancelButtonImageFile2); fileChooser.approve(); if (!sikulix.checkforStates("cancel_button2.png")) { fail(); } } @Test public void screenshotShouldBeRemoved() { try { MockUpUtils.setMockupMidoFile("mockups/empty.mido"); MockUpUtils.setMockupPropertiesFile("mockups/" + propertiesAlwaysCancelAutomation); startApplication(); DialogFixture preferencesDialog = openPreferences(); removeScreenshotFromAutomation(0, preferencesDialog); saveDialog(preferencesDialog); // check if screenshot was removed DialogFixture addDialog = openAddDialog(); Thread.sleep(5000); 
addDialog.requireVisible(); } catch (InterruptedException e) { e.printStackTrace(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.net.async;

import java.io.IOException;
import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLHandshakeException;

import com.google.common.net.InetAddresses;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelPromise;
import io.netty.channel.embedded.EmbeddedChannel;
import org.apache.cassandra.auth.AllowAllInternodeAuthenticator;
import org.apache.cassandra.auth.IInternodeAuthenticator;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.locator.AbstractEndpointSnitch;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.MessageOut;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.net.MessagingServiceTest;
import org.apache.cassandra.net.async.OutboundHandshakeHandler.HandshakeResult;
import org.apache.cassandra.net.async.OutboundMessagingConnection.State;

import static org.apache.cassandra.net.MessagingService.Verb.ECHO;
import static org.apache.cassandra.net.async.OutboundMessagingConnection.State.CLOSED;
import static org.apache.cassandra.net.async.OutboundMessagingConnection.State.CREATING_CHANNEL;
import static org.apache.cassandra.net.async.OutboundMessagingConnection.State.NOT_READY;
import static org.apache.cassandra.net.async.OutboundMessagingConnection.State.READY;

/**
 * Unit tests for {@link OutboundMessagingConnection}, driven through a Netty
 * {@link EmbeddedChannel} so no real sockets are opened.
 * <p>
 * Global state touched by the tests (endpoint snitch, internode encryption
 * options) is captured in {@link #setup()} and restored in {@link #tearDown()}.
 */
public class OutboundMessagingConnectionTest
{
    private static final InetAddressAndPort LOCAL_ADDR = InetAddressAndPort.getByAddressOverrideDefaults(InetAddresses.forString("127.0.0.1"), 9998);
    private static final InetAddressAndPort REMOTE_ADDR = InetAddressAndPort.getByAddressOverrideDefaults(InetAddresses.forString("127.0.0.2"), 9999);
    private static final InetAddressAndPort RECONNECT_ADDR = InetAddressAndPort.getByAddressOverrideDefaults(InetAddresses.forString("127.0.0.3"), 9999);
    private static final int MESSAGING_VERSION = MessagingService.current_version;

    private OutboundConnectionIdentifier connectionId;
    private OutboundMessagingConnection omc;
    private EmbeddedChannel channel;

    // saved global state, restored in tearDown()
    private IEndpointSnitch snitch;
    private ServerEncryptionOptions encryptionOptions;

    @BeforeClass
    public static void before()
    {
        DatabaseDescriptor.daemonInitialization();
    }

    @Before
    public void setup()
    {
        connectionId = OutboundConnectionIdentifier.small(LOCAL_ADDR, REMOTE_ADDR);
        omc = new OutboundMessagingConnection(connectionId, null, Optional.empty(), new AllowAllInternodeAuthenticator());
        channel = new EmbeddedChannel();
        omc.setChannelWriter(ChannelWriter.create(channel, omc::handleMessageResult, Optional.empty()));

        snitch = DatabaseDescriptor.getEndpointSnitch();
        encryptionOptions = DatabaseDescriptor.getInternodeMessagingEncyptionOptions();
    }

    @After
    public void tearDown()
    {
        DatabaseDescriptor.setEndpointSnitch(snitch);
        DatabaseDescriptor.setInternodeMessagingEncyptionOptions(encryptionOptions);
        channel.finishAndReleaseAll();
    }

    /** While a channel is being created, messages are queued in the backlog. */
    @Test
    public void sendMessage_CreatingChannel()
    {
        Assert.assertEquals(0, omc.backlogSize());
        omc.setState(CREATING_CHANNEL);
        Assert.assertTrue(omc.sendMessage(new MessageOut<>(ECHO), 1));
        Assert.assertEquals(1, omc.backlogSize());
        Assert.assertEquals(1, omc.getPendingMessages().intValue());
    }

    /** When READY, messages bypass the backlog and are written to the channel. */
    @Test
    public void sendMessage_HappyPath()
    {
        Assert.assertEquals(0, omc.backlogSize());
        omc.setState(READY);
        Assert.assertTrue(omc.sendMessage(new MessageOut<>(ECHO), 1));
        Assert.assertEquals(0, omc.backlogSize());
        Assert.assertTrue(channel.releaseOutbound());
    }

    /** A CLOSED connection drops messages: nothing queued, nothing written. */
    @Test
    public void sendMessage_Closed()
    {
        Assert.assertEquals(0, omc.backlogSize());
        omc.setState(CLOSED);
        Assert.assertFalse(omc.sendMessage(new MessageOut<>(ECHO), 1));
        Assert.assertEquals(0, omc.backlogSize());
        Assert.assertFalse(channel.releaseOutbound());
    }

    @Test
    public void shouldCompressConnection_None()
    {
        DatabaseDescriptor.setInternodeCompression(Config.InternodeCompression.none);
        Assert.assertFalse(OutboundMessagingConnection.shouldCompressConnection(LOCAL_ADDR, REMOTE_ADDR));
    }

    @Test
    public void shouldCompressConnection_All()
    {
        DatabaseDescriptor.setInternodeCompression(Config.InternodeCompression.all);
        Assert.assertTrue(OutboundMessagingConnection.shouldCompressConnection(LOCAL_ADDR, REMOTE_ADDR));
    }

    /** With dc-only compression, peers in the same DC are not compressed. */
    @Test
    public void shouldCompressConnection_SameDc()
    {
        TestSnitch snitch = new TestSnitch();
        snitch.add(LOCAL_ADDR, "dc1");
        snitch.add(REMOTE_ADDR, "dc1");
        DatabaseDescriptor.setEndpointSnitch(snitch);
        DatabaseDescriptor.setInternodeCompression(Config.InternodeCompression.dc);
        Assert.assertFalse(OutboundMessagingConnection.shouldCompressConnection(LOCAL_ADDR, REMOTE_ADDR));
    }

    /** Minimal snitch backed by an explicit node-to-datacenter map. */
    private static class TestSnitch extends AbstractEndpointSnitch
    {
        private Map<InetAddressAndPort, String> nodeToDc = new HashMap<>();

        void add(InetAddressAndPort node, String dc)
        {
            nodeToDc.put(node, dc);
        }

        @Override
        public String getRack(InetAddressAndPort endpoint)
        {
            return null;
        }

        @Override
        public String getDatacenter(InetAddressAndPort endpoint)
        {
            return nodeToDc.get(endpoint);
        }

        @Override
        public int compareEndpoints(InetAddressAndPort target, InetAddressAndPort a1, InetAddressAndPort a2)
        {
            return 0;
        }
    }

    /** With dc-only compression, peers in different DCs are compressed. */
    @Test
    public void shouldCompressConnection_DifferentDc()
    {
        TestSnitch snitch = new TestSnitch();
        snitch.add(LOCAL_ADDR, "dc1");
        snitch.add(REMOTE_ADDR, "dc2");
        DatabaseDescriptor.setEndpointSnitch(snitch);
        DatabaseDescriptor.setInternodeCompression(Config.InternodeCompression.dc);
        Assert.assertTrue(OutboundMessagingConnection.shouldCompressConnection(LOCAL_ADDR, REMOTE_ADDR));
    }

    @Test
    public void close_softClose()
    {
        close(true);
    }

    @Test
    public void close_hardClose()
    {
        close(false);
    }

    /**
     * Drives {@link OutboundMessagingConnection#close(boolean)} with a populated
     * backlog. A soft close flushes the backlog to the channel first; a hard
     * close drops it. Either way the timeout future is cancelled and the
     * channel writer closed.
     */
    private void close(boolean softClose)
    {
        int count = 32;
        for (int i = 0; i < count; i++)
            omc.addToBacklog(new QueuedMessage(new MessageOut<>(ECHO), i));
        Assert.assertEquals(count, omc.backlogSize());
        Assert.assertEquals(count, omc.getPendingMessages().intValue());

        ScheduledFuture<?> connectionTimeoutFuture = new TestScheduledFuture();
        Assert.assertFalse(connectionTimeoutFuture.isCancelled());
        omc.setConnectionTimeoutFuture(connectionTimeoutFuture);
        ChannelWriter channelWriter = ChannelWriter.create(channel, omc::handleMessageResult, Optional.empty());
        omc.setChannelWriter(channelWriter);

        omc.close(softClose);
        Assert.assertFalse(channel.isActive());
        Assert.assertEquals(State.CLOSED, omc.getState());
        Assert.assertEquals(0, omc.backlogSize());
        Assert.assertEquals(0, omc.getPendingMessages().intValue());

        int sentMessages = channel.outboundMessages().size();
        if (softClose)
            Assert.assertTrue(count <= sentMessages);
        else
            Assert.assertEquals(0, sentMessages);
        Assert.assertTrue(connectionTimeoutFuture.isCancelled());
        Assert.assertTrue(channelWriter.isClosed());
    }

    /** A rejecting internode authenticator must prevent the connection attempt. */
    @Test
    public void connect_IInternodeAuthFail()
    {
        IInternodeAuthenticator auth = new IInternodeAuthenticator()
        {
            public boolean authenticate(InetAddress remoteAddress, int remotePort)
            {
                return false;
            }

            public void validateConfiguration() throws ConfigurationException
            {
            }
        };

        // parameterized instead of the former raw MessageOut
        MessageOut<?> messageOut = new MessageOut<>(MessagingService.Verb.GOSSIP_DIGEST_ACK);
        OutboundMessagingPool pool = new OutboundMessagingPool(REMOTE_ADDR, LOCAL_ADDR, null,
                                                               new MessagingServiceTest.MockBackPressureStrategy(null).newState(REMOTE_ADDR),
                                                               auth);
        omc = pool.getConnection(messageOut);
        Assert.assertSame(State.NOT_READY, omc.getState());
        Assert.assertFalse(omc.connect());
    }

    @Test
    public void connect_ConnectionAlreadyStarted()
    {
        omc.setState(State.CREATING_CHANNEL);
        Assert.assertFalse(omc.connect());
        Assert.assertSame(State.CREATING_CHANNEL, omc.getState());
    }

    @Test
    public void connect_ConnectionClosed()
    {
        omc.setState(State.CLOSED);
        Assert.assertFalse(omc.connect());
        Assert.assertSame(State.CLOSED, omc.getState());
    }

    /** A timeout firing after the connection became READY is ignored. */
    @Test
    public void connectionTimeout_StateIsReady()
    {
        omc.setState(READY);
        ChannelFuture channelFuture = channel.newPromise();
        Assert.assertFalse(omc.connectionTimeout(channelFuture));
        Assert.assertEquals(READY, omc.getState());
    }

    @Test
    public void connectionTimeout_StateIsClosed()
    {
        omc.setState(CLOSED);
        ChannelFuture channelFuture = channel.newPromise();
        Assert.assertTrue(omc.connectionTimeout(channelFuture));
        Assert.assertEquals(CLOSED, omc.getState());
    }

    /** A genuine connection timeout drops the backlog and goes NOT_READY. */
    @Test
    public void connectionTimeout_AssumeConnectionTimedOut()
    {
        int count = 32;
        for (int i = 0; i < count; i++)
            omc.addToBacklog(new QueuedMessage(new MessageOut<>(ECHO), i));
        Assert.assertEquals(count, omc.backlogSize());
        Assert.assertEquals(count, omc.getPendingMessages().intValue());

        omc.setState(CREATING_CHANNEL);
        ChannelFuture channelFuture = channel.newPromise();
        Assert.assertTrue(omc.connectionTimeout(channelFuture));
        Assert.assertEquals(NOT_READY, omc.getState());
        Assert.assertEquals(0, omc.backlogSize());
        Assert.assertEquals(0, omc.getPendingMessages().intValue());
    }

    @Test
    public void connectCallback_FutureIsSuccess()
    {
        ChannelPromise promise = channel.newPromise();
        promise.setSuccess();
        Assert.assertTrue(omc.connectCallback(promise));
    }

    @Test
    public void connectCallback_Closed()
    {
        ChannelPromise promise = channel.newPromise();
        omc.setState(State.CLOSED);
        Assert.assertFalse(omc.connectCallback(promise));
    }

    @Test
    public void connectCallback_FailCauseIsSslHandshake()
    {
        ChannelPromise promise = channel.newPromise();
        promise.setFailure(new SSLHandshakeException("test is only a test"));
        Assert.assertFalse(omc.connectCallback(promise));
        Assert.assertSame(State.NOT_READY, omc.getState());
    }

    @Test
    public void connectCallback_FailCauseIsNPE()
    {
        ChannelPromise promise = channel.newPromise();
        promise.setFailure(new NullPointerException("test is only a test"));
        Assert.assertFalse(omc.connectCallback(promise));
        Assert.assertSame(State.NOT_READY, omc.getState());
    }

    @Test
    public void connectCallback_FailCauseIsIOException()
    {
        ChannelPromise promise = channel.newPromise();
        promise.setFailure(new IOException("test is only a test"));
        Assert.assertFalse(omc.connectCallback(promise));
        Assert.assertSame(State.NOT_READY, omc.getState());
    }

    /** A connect failure on an already-closed connection stays CLOSED. */
    @Test
    public void connectCallback_FailedAndItsClosed()
    {
        ChannelPromise promise = channel.newPromise();
        promise.setFailure(new IOException("test is only a test"));
        omc.setState(CLOSED);
        Assert.assertFalse(omc.connectCallback(promise));
        Assert.assertSame(State.CLOSED, omc.getState());
    }

    /** Successful handshake installs the writer, cancels the timeout, goes READY. */
    @Test
    public void finishHandshake_GOOD()
    {
        ChannelWriter channelWriter = ChannelWriter.create(channel, omc::handleMessageResult, Optional.empty());
        HandshakeResult result = HandshakeResult.success(channelWriter, MESSAGING_VERSION);
        ScheduledFuture<?> connectionTimeoutFuture = new TestScheduledFuture();
        Assert.assertFalse(connectionTimeoutFuture.isCancelled());

        omc.setChannelWriter(null);
        omc.setConnectionTimeoutFuture(connectionTimeoutFuture);
        omc.finishHandshake(result);
        Assert.assertFalse(channelWriter.isClosed());
        Assert.assertEquals(channelWriter, omc.getChannelWriter());
        Assert.assertEquals(READY, omc.getState());
        Assert.assertEquals(MESSAGING_VERSION, MessagingService.instance().getVersion(REMOTE_ADDR));
        Assert.assertNull(omc.getConnectionTimeoutFuture());
        Assert.assertTrue(connectionTimeoutFuture.isCancelled());
    }

    /** A handshake completing after close() must discard the new writer. */
    @Test
    public void finishHandshake_GOOD_ButClosed()
    {
        ChannelWriter channelWriter = ChannelWriter.create(channel, omc::handleMessageResult, Optional.empty());
        HandshakeResult result = HandshakeResult.success(channelWriter, MESSAGING_VERSION);
        ScheduledFuture<?> connectionTimeoutFuture = new TestScheduledFuture();
        Assert.assertFalse(connectionTimeoutFuture.isCancelled());

        omc.setChannelWriter(null);
        omc.setState(CLOSED);
        omc.setConnectionTimeoutFuture(connectionTimeoutFuture);
        omc.finishHandshake(result);
        Assert.assertTrue(channelWriter.isClosed());
        Assert.assertNull(omc.getChannelWriter());
        Assert.assertEquals(CLOSED, omc.getState());
        Assert.assertEquals(MESSAGING_VERSION, MessagingService.instance().getVersion(REMOTE_ADDR));
        Assert.assertNull(omc.getConnectionTimeoutFuture());
        Assert.assertTrue(connectionTimeoutFuture.isCancelled());
    }

    /** A DISCONNECT result keeps the backlog and retries channel creation. */
    @Test
    public void finishHandshake_DISCONNECT()
    {
        int count = 32;
        for (int i = 0; i < count; i++)
            omc.addToBacklog(new QueuedMessage(new MessageOut<>(ECHO), i));
        Assert.assertEquals(count, omc.backlogSize());

        HandshakeResult result = HandshakeResult.disconnect(MESSAGING_VERSION);
        omc.finishHandshake(result);
        Assert.assertNotNull(omc.getChannelWriter());
        Assert.assertEquals(CREATING_CHANNEL, omc.getState());
        Assert.assertEquals(MESSAGING_VERSION, MessagingService.instance().getVersion(REMOTE_ADDR));
        Assert.assertEquals(count, omc.backlogSize());
    }

    /** A failed handshake drops the backlog and goes NOT_READY. */
    @Test
    public void finishHandshake_CONNECT_FAILURE()
    {
        int count = 32;
        for (int i = 0; i < count; i++)
            omc.addToBacklog(new QueuedMessage(new MessageOut<>(ECHO), i));
        Assert.assertEquals(count, omc.backlogSize());

        HandshakeResult result = HandshakeResult.failed();
        omc.finishHandshake(result);
        Assert.assertEquals(NOT_READY, omc.getState());
        Assert.assertEquals(MESSAGING_VERSION, MessagingService.instance().getVersion(REMOTE_ADDR));
        Assert.assertEquals(0, omc.backlogSize());
    }

    @Test
    public void setStateIfNotClosed_AlreadyClosed()
    {
        AtomicReference<State> state = new AtomicReference<>(CLOSED);
        OutboundMessagingConnection.setStateIfNotClosed(state, NOT_READY);
        Assert.assertEquals(CLOSED, state.get());
    }

    @Test
    public void setStateIfNotClosed_NotClosed()
    {
        AtomicReference<State> state = new AtomicReference<>(READY);
        OutboundMessagingConnection.setStateIfNotClosed(state, NOT_READY);
        Assert.assertEquals(NOT_READY, state.get());
    }

    /** Reconnecting to a new IP swaps the connection id and closes the old writer. */
    @Test
    public void reconnectWithNewIp_HappyPath()
    {
        ChannelWriter channelWriter = ChannelWriter.create(channel, omc::handleMessageResult, Optional.empty());
        omc.setChannelWriter(channelWriter);
        omc.setState(READY);
        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        omc.reconnectWithNewIp(RECONNECT_ADDR);
        Assert.assertNotEquals(originalId, omc.getConnectionId());
        Assert.assertTrue(channelWriter.isClosed());
        Assert.assertNotSame(CLOSED, omc.getState());
    }

    @Test
    public void reconnectWithNewIp_Closed()
    {
        omc.setState(CLOSED);
        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        omc.reconnectWithNewIp(RECONNECT_ADDR);
        Assert.assertSame(omc.getConnectionId(), originalId);
        Assert.assertSame(CLOSED, omc.getState());
    }

    @Test
    public void reconnectWithNewIp_UnusedConnection()
    {
        omc.setState(NOT_READY);
        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        omc.reconnectWithNewIp(RECONNECT_ADDR);
        Assert.assertNotSame(omc.getConnectionId(), originalId);
        Assert.assertSame(NOT_READY, omc.getState());
    }

    /** Without encryption nothing about the connection id changes. */
    @Test
    public void maybeUpdateConnectionId_NoEncryption()
    {
        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        int version = omc.getTargetVersion();
        omc.maybeUpdateConnectionId();
        Assert.assertEquals(originalId, omc.getConnectionId());
        Assert.assertEquals(version, omc.getTargetVersion());
    }

    /** Same messaging version on the peer: connection id stays unchanged. */
    @Test
    public void maybeUpdateConnectionId_SameVersion()
    {
        // local options, renamed so it no longer shadows the saved-state field
        ServerEncryptionOptions options = new ServerEncryptionOptions();
        omc = new OutboundMessagingConnection(connectionId, options, Optional.empty(), new AllowAllInternodeAuthenticator());
        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        int version = omc.getTargetVersion();
        omc.maybeUpdateConnectionId();
        Assert.assertEquals(originalId, omc.getConnectionId());
        Assert.assertEquals(version, omc.getTargetVersion());
    }

    /**
     * A 3.x peer with all-internode encryption forces the connection over to
     * the secure storage port and downgrades the target messaging version.
     */
    @Test
    public void maybeUpdateConnectionId_3_X_Version()
    {
        ServerEncryptionOptions options = new ServerEncryptionOptions();
        options.enabled = true;
        options.internode_encryption = ServerEncryptionOptions.InternodeEncryption.all;
        DatabaseDescriptor.setInternodeMessagingEncyptionOptions(options);
        omc = new OutboundMessagingConnection(connectionId, options, Optional.empty(), new AllowAllInternodeAuthenticator());

        int peerVersion = MessagingService.VERSION_30;
        MessagingService.instance().setVersion(connectionId.remote(), peerVersion);

        OutboundConnectionIdentifier originalId = omc.getConnectionId();
        omc.maybeUpdateConnectionId();
        Assert.assertNotEquals(originalId, omc.getConnectionId());
        Assert.assertEquals(InetAddressAndPort.getByAddressOverrideDefaults(REMOTE_ADDR.address, DatabaseDescriptor.getSSLStoragePort()), omc.getConnectionId().remote());
        Assert.assertEquals(InetAddressAndPort.getByAddressOverrideDefaults(REMOTE_ADDR.address, DatabaseDescriptor.getSSLStoragePort()), omc.getConnectionId().connectionAddress());
        Assert.assertEquals(peerVersion, omc.getTargetVersion());
    }
}
package mkl.testarea.pdfbox2.merge;

import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.fontbox.util.BoundingBox;
import org.apache.pdfbox.contentstream.PDFGraphicsStreamEngine;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDCIDFontType2;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDSimpleFont;
import org.apache.pdfbox.pdmodel.font.PDTrueTypeFont;
import org.apache.pdfbox.pdmodel.font.PDType0Font;
import org.apache.pdfbox.pdmodel.font.PDType3CharProc;
import org.apache.pdfbox.pdmodel.font.PDType3Font;
import org.apache.pdfbox.pdmodel.font.PDVectorFont;
import org.apache.pdfbox.pdmodel.graphics.image.PDImage;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.Vector;

/**
 * <a href="https://stackoverflow.com/questions/60052967/how-to-dense-merge-pdf-files-using-pdfbox-2-without-whitespace-near-page-breaks">
 * How to dense merge PDF files using PDFBox 2 without whitespace near page breaks?
 * </a>
 * <p>
 * This {@link PDFGraphicsStreamEngine} looks for vertical sections of use in a page.
 * The result is exposed by {@link #getVerticalFlips()}: an even-length list of y
 * coordinates at which "used" intervals alternately start and end.
 * </p>
 * <p>
 * Beware, this is a mere proof of concept. Especially {@link #curveTo(float, float, float, float, float, float)}
 * needs to be improved, see the comment there.
 * </p>
 * <p>
 * This class corresponds to the itext5 test area class with the same name,
 * {@link mkl.testarea.itext5.merge.PageVerticalAnalyzer}.
 * </p>
 *
 * @author mklink
 */
public class PageVerticalAnalyzer extends PDFGraphicsStreamEngine
{
    protected PageVerticalAnalyzer(PDPage page)
    {
        super(page);
    }

    /** Returns the alternating start/end y values of the used vertical sections. */
    public List<Float> getVerticalFlips()
    {
        return verticalFlips;
    }

    //
    // Text
    //

    /** Registers the vertical extent of each rendered glyph's bounding box. */
    @Override
    protected void showGlyph(Matrix textRenderingMatrix, PDFont font, int code, Vector displacement) throws IOException
    {
        super.showGlyph(textRenderingMatrix, font, code, displacement);

        Shape shape = calculateGlyphBounds(textRenderingMatrix, font, code);
        if (shape != null)
        {
            Rectangle2D rect = shape.getBounds2D();
            addVerticalUseSection(rect.getMinY(), rect.getMaxY());
        }
    }

    /**
     * Copy of <code>org.apache.pdfbox.examples.util.DrawPrintTextLocations.calculateGlyphBounds(Matrix, PDFont, int)</code>.
     *
     * Returns the glyph's outline bounds transformed to page space, or
     * <code>null</code> if no outline could be determined.
     */
    private Shape calculateGlyphBounds(Matrix textRenderingMatrix, PDFont font, int code) throws IOException
    {
        GeneralPath path = null;
        AffineTransform at = textRenderingMatrix.createAffineTransform();
        at.concatenate(font.getFontMatrix().createAffineTransform());
        if (font instanceof PDType3Font)
        {
            // It is difficult to calculate the real individual glyph bounds for type 3 fonts
            // because these are not vector fonts, the content stream could contain almost anything
            // that is found in page content streams.
            PDType3Font t3Font = (PDType3Font) font;
            PDType3CharProc charProc = t3Font.getCharProc(code);
            if (charProc != null)
            {
                BoundingBox fontBBox = t3Font.getBoundingBox();
                PDRectangle glyphBBox = charProc.getGlyphBBox();
                if (glyphBBox != null)
                {
                    // PDFBOX-3850: glyph bbox could be larger than the font bbox
                    glyphBBox.setLowerLeftX(Math.max(fontBBox.getLowerLeftX(), glyphBBox.getLowerLeftX()));
                    glyphBBox.setLowerLeftY(Math.max(fontBBox.getLowerLeftY(), glyphBBox.getLowerLeftY()));
                    glyphBBox.setUpperRightX(Math.min(fontBBox.getUpperRightX(), glyphBBox.getUpperRightX()));
                    glyphBBox.setUpperRightY(Math.min(fontBBox.getUpperRightY(), glyphBBox.getUpperRightY()));
                    path = glyphBBox.toGeneralPath();
                }
            }
        }
        else if (font instanceof PDVectorFont)
        {
            PDVectorFont vectorFont = (PDVectorFont) font;
            path = vectorFont.getPath(code);

            if (font instanceof PDTrueTypeFont)
            {
                PDTrueTypeFont ttFont = (PDTrueTypeFont) font;
                int unitsPerEm = ttFont.getTrueTypeFont().getHeader().getUnitsPerEm();
                at.scale(1000d / unitsPerEm, 1000d / unitsPerEm);
            }
            if (font instanceof PDType0Font)
            {
                PDType0Font t0font = (PDType0Font) font;
                if (t0font.getDescendantFont() instanceof PDCIDFontType2)
                {
                    int unitsPerEm = ((PDCIDFontType2) t0font.getDescendantFont()).getTrueTypeFont().getHeader().getUnitsPerEm();
                    at.scale(1000d / unitsPerEm, 1000d / unitsPerEm);
                }
            }
        }
        else if (font instanceof PDSimpleFont)
        {
            PDSimpleFont simpleFont = (PDSimpleFont) font;

            // these two lines do not always work, e.g. for the TT fonts in file 032431.pdf
            // which is why PDVectorFont is tried first.
            String name = simpleFont.getEncoding().getName(code);
            path = simpleFont.getPath(name);
        }
        else
        {
            // shouldn't happen, please open issue in JIRA
            System.out.println("Unknown font class: " + font.getClass());
        }
        if (path == null)
        {
            return null;
        }

        return at.createTransformedShape(path.getBounds2D());
    }

    //
    // Bitmaps
    //

    /** Registers the vertical extent of the image's transformed unit square. */
    @Override
    public void drawImage(PDImage pdImage) throws IOException
    {
        Matrix ctm = getGraphicsState().getCurrentTransformationMatrix();

        Section section = null;
        // transform all four corners of the unit square and take their y extent
        for (int x = 0; x < 2; x++)
        {
            for (int y = 0; y < 2; y++)
            {
                Point2D.Float point = ctm.transformPoint(x, y);
                if (section == null)
                    section = new Section(point.y);
                else
                    section.extendTo(point.y);
            }
        }
        addVerticalUseSection(section.from, section.to);
    }

    //
    // Paths
    //

    @Override
    public void appendRectangle(Point2D p0, Point2D p1, Point2D p2, Point2D p3) throws IOException
    {
        // A rectangle is a complete subpath of its own; start a fresh subpath
        // for whatever follows.
        subPath = null;
        Section section = new Section(p0.getY());
        section.extendTo(p1.getY()).extendTo(p2.getY()).extendTo(p3.getY());
        // FIX: the computed section previously was dropped, so rectangles never
        // contributed to stroke/fill analysis; register it with the current path.
        path.add(section);
        currentPoint = p0;
    }

    @Override
    public void clip(int windingRule) throws IOException
    {
    }

    @Override
    public void moveTo(float x, float y) throws IOException
    {
        subPath = new Section(y);
        path.add(subPath);
        currentPoint = new Point2D.Float(x, y);
    }

    @Override
    public void lineTo(float x, float y) throws IOException
    {
        if (subPath == null)
        {
            subPath = new Section(y);
            path.add(subPath);
        }
        else
            subPath.extendTo(y);
        currentPoint = new Point2D.Float(x, y);
    }

    /**
     * Beware! This is incorrect! The control points may be outside
     * the vertically used range
     */
    @Override
    public void curveTo(float x1, float y1, float x2, float y2, float x3, float y3) throws IOException
    {
        if (subPath == null)
        {
            subPath = new Section(y1);
            path.add(subPath);
        }
        else
            subPath.extendTo(y1);
        subPath.extendTo(y2).extendTo(y3);
        currentPoint = new Point2D.Float(x3, y3);
    }

    @Override
    public Point2D getCurrentPoint() throws IOException
    {
        return currentPoint;
    }

    @Override
    public void closePath() throws IOException
    {
    }

    /** An unpainted path contributes nothing; discard the collected sections. */
    @Override
    public void endPath() throws IOException
    {
        path.clear();
        subPath = null;
    }

    @Override
    public void strokePath() throws IOException
    {
        for (Section section : path)
        {
            addVerticalUseSection(section.from, section.to);
        }
        path.clear();
        subPath = null;
    }

    @Override
    public void fillPath(int windingRule) throws IOException
    {
        for (Section section : path)
        {
            addVerticalUseSection(section.from, section.to);
        }
        path.clear();
        subPath = null;
    }

    @Override
    public void fillAndStrokePath(int windingRule) throws IOException
    {
        for (Section section : path)
        {
            addVerticalUseSection(section.from, section.to);
        }
        path.clear();
        subPath = null;
    }

    @Override
    public void shadingFill(COSName shadingName) throws IOException
    {
        // TODO Auto-generated method stub
    }

    Point2D currentPoint = null;
    List<Section> path = new ArrayList<>();
    Section subPath = null;

    /** A closed vertical interval [from, to], grown via {@link #extendTo}. */
    static class Section
    {
        Section(double value)
        {
            this((float) value);
        }

        Section(float value)
        {
            from = value;
            to = value;
        }

        Section extendTo(double value)
        {
            return extendTo((float) value);
        }

        Section extendTo(float value)
        {
            if (value < from)
                from = value;
            else if (value > to)
                to = value;
            return this;
        }

        private float from;
        private float to;
    }

    void addVerticalUseSection(double from, double to)
    {
        addVerticalUseSection((float) from, (float) to);
    }

    /**
     * Merges the interval [from, to] into {@link #verticalFlips}, which holds
     * the sorted y values at which usage toggles (even index = interval start,
     * odd index = interval end); overlapping intervals are coalesced.
     */
    void addVerticalUseSection(float from, float to)
    {
        if (to < from)
        {
            float temp = to;
            to = from;
            from = temp;
        }

        // i = first flip >= from; j = first flip >= to
        int i = 0, j = 0;
        for (; i < verticalFlips.size(); i++)
        {
            float flip = verticalFlips.get(i);
            if (flip < from)
                continue;
            for (j = i; j < verticalFlips.size(); j++)
            {
                flip = verticalFlips.get(j);
                if (flip < to)
                    continue;
                break;
            }
            break;
        }
        // an even index means the boundary falls outside any existing interval
        boolean fromOutsideInterval = i % 2 == 0;
        boolean toOutsideInterval = j % 2 == 0;

        // drop the flips swallowed by the new interval, then insert its ends
        while (j-- > i)
            verticalFlips.remove(j);
        if (toOutsideInterval)
            verticalFlips.add(i, to);
        if (fromOutsideInterval)
            verticalFlips.add(i, from);
    }

    final List<Float> verticalFlips = new ArrayList<>();
}
/*
 * Copyright 2017 CurrencyFair Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.currencyfair.onesignal.model.notification;

import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Convenient class for creating {@link NotificationRequest}.
 *
 * <p>Fluent builder: every {@code withXxx} method sets one field and returns
 * {@code this}. Singular variants (e.g. {@link #withContent(String, String)})
 * append to the corresponding collection and silently skip blank values;
 * plural variants replace the whole collection reference. Call
 * {@link #build()} to obtain the populated request. Instances are not
 * thread-safe.
 */
public final class NotificationRequestBuilder {
    private String id;
    private Boolean opened;
    private Long limit;
    private Long offset;
    private List<String> includedSegments = new ArrayList<>();
    private List<String> excludedSegments = new ArrayList<>();
    private List<Filter> filters = new ArrayList<>();
    private List<String> includePlayerIds = new ArrayList<>();
    private String appId;
    private List<String> appIds;
    private Map<String, String> contents = new HashMap<>();
    private Map<String, String> headings = new HashMap<>();
    private Map<String, String> subtitle = new HashMap<>();
    private String templateId;
    private Boolean contentAvailable;
    private Boolean mutableContent;
    private Map<String, String> data = new HashMap<>();
    private String url;
    private Map<String, String> iosAttachments = new HashMap<>();
    private String bigPicture;
    private String admBigPicture;
    private String chromeBigPicture;
    private List<Button> buttons = new ArrayList<>();
    private List<Button> webButtons = new ArrayList<>();
    private String iosCategory;
    private AndroidBackgroundLayout androidBackgroundLayout;
    private String amazonBackgroundData;
    private String smallIcon;
    private String largeIcon;
    private String chromeWebIcon;
    private String firefoxIcon;
    private String admSmallIcon;
    private String admLargeIcon;
    private String chromeIcon;
    private String iosSound;
    private String androidSound;
    private String admSound;
    private String wpSound;
    private String wpWnsSound;
    private String androidLedColor;
    private String androidAccentColor;
    private AndroidVisibility androidVisibility;
    private IosBadgeType iosBadgeType;
    private Integer iosBadgeCount;
    private String collapseId;
    private String sendAfter;
    private DelayedOption delayedOption;
    private String deliveryTimeOfDay;
    private Integer ttl;
    private Integer priority;
    private String androidGroup;
    private Map<String, String> androidGroupMessage = new HashMap<>();
    private String admGroup;
    private Map<String, String> admGroupMessage = new HashMap<>();
    private Boolean ios;
    private Boolean android;
    private Boolean anyWeb;
    private Boolean chromeWeb;
    private Boolean firefox;
    private Boolean safari;
    private Boolean wp;
    private Boolean wpwns;
    private Boolean adm;
    private Boolean chrome;

    private NotificationRequestBuilder() {
    }

    /** Entry point: creates a fresh, empty builder. */
    public static NotificationRequestBuilder aNotificationRequest() {
        return new NotificationRequestBuilder();
    }

    public NotificationRequestBuilder withId(String id) {
        this.id = id;
        return this;
    }

    public NotificationRequestBuilder withOpened(Boolean opened) {
        this.opened = opened;
        return this;
    }

    public NotificationRequestBuilder withLimit(Long limit) {
        this.limit = limit;
        return this;
    }

    public NotificationRequestBuilder withOffset(Long offset) {
        this.offset = offset;
        return this;
    }

    public NotificationRequestBuilder withIncludedSegments(List<String> includedSegments) {
        this.includedSegments = includedSegments;
        return this;
    }

    /** Appends one segment; blank values are ignored. */
    public NotificationRequestBuilder withIncludedSegment(String includedSegment) {
        if (StringUtils.isNotBlank(includedSegment)) {
            includedSegments.add(includedSegment);
        }
        return this;
    }

    public NotificationRequestBuilder withExcludedSegments(List<String> excludedSegments) {
        this.excludedSegments = excludedSegments;
        return this;
    }

    /** Appends one segment; blank values are ignored. */
    public NotificationRequestBuilder withExcludedSegment(String excludedSegment) {
        if (StringUtils.isNotBlank(excludedSegment)) {
            excludedSegments.add(excludedSegment);
        }
        return this;
    }

    public NotificationRequestBuilder withFilters(List<Filter> filters) {
        this.filters = filters;
        return this;
    }

    public NotificationRequestBuilder withFilter(Filter filter) {
        filters.add(filter);
        return this;
    }

    public NotificationRequestBuilder withIncludePlayerIds(List<String> includePlayerIds) {
        this.includePlayerIds = includePlayerIds;
        return this;
    }

    /** Appends one player id; blank values are ignored. */
    public NotificationRequestBuilder withIncludePlayerId(String includePlayerId) {
        if (StringUtils.isNotBlank(includePlayerId)) {
            includePlayerIds.add(includePlayerId);
        }
        return this;
    }

    public NotificationRequestBuilder withAppId(String appId) {
        this.appId = appId;
        return this;
    }

    public NotificationRequestBuilder withAppIds(List<String> appIds) {
        this.appIds = appIds;
        return this;
    }

    public NotificationRequestBuilder withContents(Map<String, String> contents) {
        this.contents = contents;
        return this;
    }

    /** Adds one localized content entry; blank values are ignored. */
    public NotificationRequestBuilder withContent(String language, String value) {
        if (StringUtils.isNotBlank(value)) {
            contents.put(language, value);
        }
        return this;
    }

    public NotificationRequestBuilder withHeadings(Map<String, String> headings) {
        this.headings = headings;
        return this;
    }

    /** Adds one localized heading entry; blank values are ignored. */
    public NotificationRequestBuilder withHeading(String language, String value) {
        if (StringUtils.isNotBlank(value)) {
            headings.put(language, value);
        }
        return this;
    }

    public NotificationRequestBuilder withSubtitles(Map<String, String> subtitle) {
        this.subtitle = subtitle;
        return this;
    }

    /** Adds one localized subtitle entry; blank values are ignored. */
    public NotificationRequestBuilder withSubtitle(String language, String value) {
        if (StringUtils.isNotBlank(value)) {
            subtitle.put(language, value);
        }
        return this;
    }

    public NotificationRequestBuilder withTemplateId(String templateId) {
        this.templateId = templateId;
        return this;
    }

    public NotificationRequestBuilder withMutableContent(Boolean mutableContent) {
        this.mutableContent = mutableContent;
        return this;
    }

    public NotificationRequestBuilder withContentAvailable(Boolean contentAvailable) {
        this.contentAvailable = contentAvailable;
        return this;
    }

    public NotificationRequestBuilder withData(Map<String, String> data) {
        this.data = data;
        return this;
    }

    public NotificationRequestBuilder withDataElement(String key, String value) {
        data.put(key, value);
        return this;
    }

    public NotificationRequestBuilder withUrl(String url) {
        this.url = url;
        return this;
    }

    public NotificationRequestBuilder withIosAttachments(Map<String, String> iosAttachments) {
        this.iosAttachments = iosAttachments;
        return this;
    }

    public NotificationRequestBuilder withIosAttachment(String key, String value) {
        iosAttachments.put(key, value);
        return this;
    }

    public NotificationRequestBuilder withBigPicture(String bigPicture) {
        this.bigPicture = bigPicture;
        return this;
    }

    public NotificationRequestBuilder withAdmBigPicture(String admBigPicture) {
        this.admBigPicture = admBigPicture;
        return this;
    }

    public NotificationRequestBuilder withChromeBigPicture(String chromeBigPicture) {
        this.chromeBigPicture = chromeBigPicture;
        return this;
    }

    public NotificationRequestBuilder withButtons(List<Button> buttons) {
        this.buttons = buttons;
        return this;
    }

    public NotificationRequestBuilder withButton(Button button) {
        buttons.add(button);
        return this;
    }

    public NotificationRequestBuilder withWebButtons(List<Button> webButtons) {
        this.webButtons = webButtons;
        return this;
    }

    public NotificationRequestBuilder withWebButton(Button button) {
        webButtons.add(button);
        return this;
    }

    public NotificationRequestBuilder withIosCategory(String iosCategory) {
        this.iosCategory = iosCategory;
        return this;
    }

    /**
     * Sets the Android background layout.
     *
     * @deprecated method name is misspelled; use
     *             {@link #withAndroidBackgroundLayout(AndroidBackgroundLayout)}.
     *             Kept as a delegating alias for backward compatibility.
     */
    @Deprecated
    public NotificationRequestBuilder withAndroidBackgroudLayout(AndroidBackgroundLayout androidBackgroundLayout) {
        return withAndroidBackgroundLayout(androidBackgroundLayout);
    }

    /** Sets the Android background layout (correctly spelled variant). */
    public NotificationRequestBuilder withAndroidBackgroundLayout(AndroidBackgroundLayout androidBackgroundLayout) {
        this.androidBackgroundLayout = androidBackgroundLayout;
        return this;
    }

    public NotificationRequestBuilder withAmazonBackgroundData(String amazonBackgroundData) {
        this.amazonBackgroundData = amazonBackgroundData;
        return this;
    }

    public NotificationRequestBuilder withSmallIcon(String smallIcon) {
        this.smallIcon = smallIcon;
        return this;
    }

    public NotificationRequestBuilder withLargeIcon(String largeIcon) {
        this.largeIcon = largeIcon;
        return this;
    }

    public NotificationRequestBuilder withChromeWebIcon(String chromeWebIcon) {
        this.chromeWebIcon = chromeWebIcon;
        return this;
    }

    public NotificationRequestBuilder withFirefoxIcon(String firefoxIcon) {
        this.firefoxIcon = firefoxIcon;
        return this;
    }

    public NotificationRequestBuilder withAdmSmallIcon(String admSmallIcon) {
        this.admSmallIcon = admSmallIcon;
        return this;
    }

    public NotificationRequestBuilder withAdmLargeIcon(String admLargeIcon) {
        this.admLargeIcon = admLargeIcon;
        return this;
    }

    public NotificationRequestBuilder withChromeIcon(String chromeIcon) {
        this.chromeIcon = chromeIcon;
        return this;
    }

    public NotificationRequestBuilder withIosSound(String iosSound) {
        this.iosSound = iosSound;
        return this;
    }

    public NotificationRequestBuilder withAndroidSound(String androidSound) {
        this.androidSound = androidSound;
        return this;
    }

    public NotificationRequestBuilder withAdmSound(String admSound) {
        this.admSound = admSound;
        return this;
    }

    public NotificationRequestBuilder withWpSound(String wpSound) {
        this.wpSound = wpSound;
        return this;
    }

    public NotificationRequestBuilder withWpWnsSound(String wpWnsSound) {
        this.wpWnsSound = wpWnsSound;
        return this;
    }

    public NotificationRequestBuilder withAndroidLedColor(String androidLedColor) {
        this.androidLedColor = androidLedColor;
        return this;
    }

    public NotificationRequestBuilder withAndroidAccentColor(String androidAccentColor) {
        this.androidAccentColor = androidAccentColor;
        return this;
    }

    public NotificationRequestBuilder withAndroidVisibility(AndroidVisibility androidVisibility) {
        this.androidVisibility = androidVisibility;
        return this;
    }

    public NotificationRequestBuilder withIosBadgeType(IosBadgeType iosBadgeType) {
        this.iosBadgeType = iosBadgeType;
        return this;
    }

    public NotificationRequestBuilder withIosBadgeCount(Integer iosBadgeCount) {
        this.iosBadgeCount = iosBadgeCount;
        return this;
    }

    public NotificationRequestBuilder withCollapseId(String collapseId) {
        this.collapseId = collapseId;
        return this;
    }

    public NotificationRequestBuilder withSendAfter(String sendAfter) {
        this.sendAfter = sendAfter;
        return this;
    }

    public NotificationRequestBuilder withDelayedOption(DelayedOption delayedOption) {
        this.delayedOption = delayedOption;
        return this;
    }

    public NotificationRequestBuilder withDeliveryTimeOfDay(String deliveryTimeOfDay) {
        this.deliveryTimeOfDay = deliveryTimeOfDay;
        return this;
    }

    public NotificationRequestBuilder withTtl(Integer ttl) {
        this.ttl = ttl;
        return this;
    }

    public NotificationRequestBuilder withPriority(Integer priority) {
        this.priority = priority;
        return this;
    }

    public NotificationRequestBuilder withAndroidGroup(String androidGroup) {
        this.androidGroup = androidGroup;
        return this;
    }

    public NotificationRequestBuilder withAndroidGroupMessages(Map<String, String> androidGroupMessage) {
        this.androidGroupMessage = androidGroupMessage;
        return this;
    }

    /** Adds one localized group message; blank values are ignored. */
    public NotificationRequestBuilder withAndroidGroupMessage(String language, String value) {
        if (StringUtils.isNotBlank(value)) {
            androidGroupMessage.put(language, value);
        }
        return this;
    }

    public NotificationRequestBuilder withAdmGroup(String admGroup) {
        this.admGroup = admGroup;
        return this;
    }

    public NotificationRequestBuilder withAdmGroupMessages(Map<String, String> admGroupMessage) {
        this.admGroupMessage = admGroupMessage;
        return this;
    }

    /** Adds one localized ADM group message; blank values are ignored. */
    public NotificationRequestBuilder withAdmGroupMessage(String language, String value) {
        if (StringUtils.isNotBlank(value)) {
            admGroupMessage.put(language, value);
        }
        return this;
    }

    public NotificationRequestBuilder withIos(Boolean ios) {
        this.ios = ios;
        return this;
    }

    public NotificationRequestBuilder withAndroid(Boolean android) {
        this.android = android;
        return this;
    }

    public NotificationRequestBuilder withAnyWeb(Boolean anyWeb) {
        this.anyWeb = anyWeb;
        return this;
    }

    public NotificationRequestBuilder withChromeWeb(Boolean chromeWeb) {
        this.chromeWeb = chromeWeb;
        return this;
    }

    public NotificationRequestBuilder withFirefox(Boolean firefox) {
        this.firefox = firefox;
        return this;
    }

    public NotificationRequestBuilder withSafari(Boolean safari) {
        this.safari = safari;
        return this;
    }

    public NotificationRequestBuilder withWp(Boolean wp) {
        this.wp = wp;
        return this;
    }

    public NotificationRequestBuilder withWpwns(Boolean wpwns) {
        this.wpwns = wpwns;
        return this;
    }

    public NotificationRequestBuilder withAdm(Boolean adm) {
        this.adm = adm;
        return this;
    }

    public NotificationRequestBuilder withChrome(Boolean chrome) {
        this.chrome = chrome;
        return this;
    }

    /**
     * Copies every accumulated value onto a new {@link NotificationRequest}.
     * Collections are passed by reference, not defensively copied.
     */
    public NotificationRequest build() {
        NotificationRequest notificationRequest = new NotificationRequest();
        notificationRequest.setId(id);
        notificationRequest.setOpened(opened);
        notificationRequest.setLimit(limit);
        notificationRequest.setOffset(offset);
        notificationRequest.setIncludedSegments(includedSegments);
        notificationRequest.setExcludedSegments(excludedSegments);
        notificationRequest.setFilters(filters);
        notificationRequest.setIncludePlayerIds(includePlayerIds);
        notificationRequest.setAppId(appId);
        notificationRequest.setAppIds(appIds);
        notificationRequest.setContents(contents);
        notificationRequest.setHeadings(headings);
        notificationRequest.setSubtitle(subtitle);
        notificationRequest.setTemplateId(templateId);
        notificationRequest.setContentAvailable(contentAvailable);
        notificationRequest.setMutableContent(mutableContent);
        notificationRequest.setData(data);
        notificationRequest.setUrl(url);
        notificationRequest.setIosAttachments(iosAttachments);
        notificationRequest.setBigPicture(bigPicture);
        notificationRequest.setAdmBigPicture(admBigPicture);
        notificationRequest.setChromeBigPicture(chromeBigPicture);
        notificationRequest.setButtons(buttons);
        notificationRequest.setWebButtons(webButtons);
        notificationRequest.setIosCategory(iosCategory);
        notificationRequest.setAndroidBackgroundLayout(androidBackgroundLayout);
        notificationRequest.setAmazonBackgroundData(amazonBackgroundData);
        notificationRequest.setSmallIcon(smallIcon);
        notificationRequest.setLargeIcon(largeIcon);
        notificationRequest.setChromeWebIcon(chromeWebIcon);
        notificationRequest.setFirefoxIcon(firefoxIcon);
        notificationRequest.setAdmSmallIcon(admSmallIcon);
        notificationRequest.setAdmLargeIcon(admLargeIcon);
        notificationRequest.setChromeIcon(chromeIcon);
        notificationRequest.setIosSound(iosSound);
        notificationRequest.setAndroidSound(androidSound);
        notificationRequest.setAdmSound(admSound);
        notificationRequest.setWpSound(wpSound);
        notificationRequest.setWpWnsSound(wpWnsSound);
        notificationRequest.setAndroidLedColor(androidLedColor);
        notificationRequest.setAndroidAccentColor(androidAccentColor);
        notificationRequest.setAndroidVisibility(androidVisibility);
        notificationRequest.setIosBadgeType(iosBadgeType);
        notificationRequest.setIosBadgeCount(iosBadgeCount);
        notificationRequest.setCollapseId(collapseId);
        notificationRequest.setSendAfter(sendAfter);
        notificationRequest.setDelayedOption(delayedOption);
        notificationRequest.setDeliveryTimeOfDay(deliveryTimeOfDay);
        notificationRequest.setTtl(ttl);
        notificationRequest.setPriority(priority);
        notificationRequest.setAndroidGroup(androidGroup);
        notificationRequest.setAndroidGroupMessage(androidGroupMessage);
        notificationRequest.setAdmGroup(admGroup);
        notificationRequest.setAdmGroupMessage(admGroupMessage);
        notificationRequest.setIos(ios);
        notificationRequest.setAndroid(android);
        notificationRequest.setAnyWeb(anyWeb);
        notificationRequest.setChromeWeb(chromeWeb);
        notificationRequest.setFirefox(firefox);
        notificationRequest.setSafari(safari);
        notificationRequest.setWp(wp);
        notificationRequest.setWpwns(wpwns);
        notificationRequest.setAdm(adm);
        notificationRequest.setChrome(chrome);
        return notificationRequest;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.indexer.partitions;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Unit tests for {@code DimensionRangePartitionsSpec}: JSON round-tripping and
 * validation of the targetRowsPerSegment / maxRowsPerSegment / partitionDimensions
 * arguments.
 */
public class DimensionRangePartitionsSpecTest
{
  private static final Integer TARGET_ROWS_PER_SEGMENT = 1;
  private static final Integer MAX_ROWS_PER_SEGMENT = null;
  // Sentinel historically used to mean "not set"; must be rejected like null.
  private static final Integer HISTORICAL_NULL = PartitionsSpec.HISTORICAL_NULL;
  private static final List<String> PARTITION_DIMENSIONS = Arrays.asList("a", "b");
  private static final boolean ASSUME_GROUPED = false;
  private static final DimensionRangePartitionsSpec SPEC = new DimensionRangePartitionsSpec(
      TARGET_ROWS_PER_SEGMENT,
      MAX_ROWS_PER_SEGMENT,
      PARTITION_DIMENSIONS,
      ASSUME_GROUPED
  );
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  @Rule
  public ExpectedException exception = ExpectedException.none();

  // Serialize then deserialize must yield an equal spec.
  @Test
  public void serde()
  {
    String json = serialize(SPEC);
    DimensionRangePartitionsSpec spec = deserialize(json);
    Assert.assertEquals(SPEC, spec);
  }

  @Test
  public void havingNeitherTargetNorMaxForbidden()
  {
    new TestSpecBuilder()
        .testIllegalArgumentException("Exactly one of targetRowsPerSegment or maxRowsPerSegment must be present");
  }

  @Test
  public void targetRowsPerSegmentMustBePositive()
  {
    new TestSpecBuilder()
        .targetRowsPerSegment(0)
        .testIllegalArgumentException("targetRowsPerSegment must be greater than 0");
  }

  @Test
  public void targetRowsPerSegmentHistoricalNull()
  {
    new TestSpecBuilder()
        .targetRowsPerSegment(HISTORICAL_NULL)
        .testIllegalArgumentException("Exactly one of targetRowsPerSegment or maxRowsPerSegment must be present");
  }

  @Test
  public void targetMaxRowsPerSegmentOverflows()
  {
    new TestSpecBuilder()
        .targetRowsPerSegment(Integer.MAX_VALUE)
        .testIllegalArgumentException("targetRowsPerSegment is too large");
  }

  @Test
  public void maxRowsPerSegmentMustBePositive()
  {
    new TestSpecBuilder()
        .maxRowsPerSegment(0)
        .testIllegalArgumentException("maxRowsPerSegment must be greater than 0");
  }

  @Test
  public void maxRowsPerSegmentHistoricalNull()
  {
    new TestSpecBuilder()
        .maxRowsPerSegment(HISTORICAL_NULL)
        .testIllegalArgumentException("Exactly one of targetRowsPerSegment or maxRowsPerSegment must be present");
  }

  @Test
  public void resolvesMaxFromTargetRowsPerSegment()
  {
    DimensionRangePartitionsSpec spec = new TestSpecBuilder()
        .targetRowsPerSegment(123)
        .build();
    // 184 appears to be target scaled by 1.5 (123 * 1.5 = 184.5, truncated)
    // -- TODO confirm against DimensionRangePartitionsSpec's resolution logic.
    Assert.assertEquals(184, spec.getMaxRowsPerSegment().intValue());
  }

  @Test
  public void resolvesMaxFromMaxRowsPerSegment()
  {
    DimensionRangePartitionsSpec spec = new TestSpecBuilder()
        .maxRowsPerSegment(123)
        .build();
    Assert.assertEquals(123, spec.getMaxRowsPerSegment().intValue());
  }

  @Test
  public void getPartitionDimensionFromNull()
  {
    // Verify that partitionDimensions must be non-null
    new TestSpecBuilder()
        .partitionDimensions(null)
        .testIllegalArgumentException("partitionDimensions must be specified");
  }

  @Test
  public void getPartitionDimensionFromNonNull()
  {
    List<String> partitionDimensions = Collections.singletonList("a");
    DimensionRangePartitionsSpec spec = new TestSpecBuilder()
        .targetRowsPerSegment(10)
        .partitionDimensions(partitionDimensions)
        .build();
    Assert.assertEquals(partitionDimensions, spec.getPartitionDimensions());
  }

  // Helper: serialize any object, converting checked Jackson errors to unchecked.
  private static String serialize(Object object)
  {
    try {
      return OBJECT_MAPPER.writeValueAsString(object);
    }
    catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }

  // Helper: deserialize JSON back into the spec type under test.
  private static DimensionRangePartitionsSpec deserialize(String serialized)
  {
    try {
      return OBJECT_MAPPER.readValue(serialized, DimensionRangePartitionsSpec.class);
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Spec builder used in this test.
   */
  // Intentionally a non-static inner class: testIllegalArgumentException uses
  // the enclosing test's ExpectedException rule, so expectations must be set
  // BEFORE build() is invoked.
  private class TestSpecBuilder
  {
    private Integer targetRowsPerSegment;
    private Integer maxRowsPerSegment;
    private List<String> partitionDimensions = Collections.emptyList();

    TestSpecBuilder targetRowsPerSegment(Integer targetRowsPerSegment)
    {
      this.targetRowsPerSegment = targetRowsPerSegment;
      return this;
    }

    TestSpecBuilder maxRowsPerSegment(Integer maxRowsPerSegment)
    {
      this.maxRowsPerSegment = maxRowsPerSegment;
      return this;
    }

    TestSpecBuilder partitionDimensions(List<String> partitionDimensions)
    {
      this.partitionDimensions = partitionDimensions;
      return this;
    }

    // Arms the ExpectedException rule, then triggers construction; the rule
    // asserts the IllegalArgumentException and its message.
    void testIllegalArgumentException(String exceptionExpectedMessage)
    {
      exception.expect(IllegalArgumentException.class);
      exception.expectMessage(exceptionExpectedMessage);
      build();
    }

    DimensionRangePartitionsSpec build()
    {
      return new DimensionRangePartitionsSpec(
          targetRowsPerSegment,
          maxRowsPerSegment,
          partitionDimensions,
          ASSUME_GROUPED
      );
    }
  }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer.hls;

import android.os.Handler;
import android.os.SystemClock;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSource.SampleSourceReader;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedList;

/**
 * A {@link SampleSource} for HLS streams.
 */
public final class HlsSampleSource implements SampleSource, SampleSourceReader, Loader.Callback {

  /**
   * Interface definition for a callback to be notified of {@link HlsSampleSource} events.
   */
  public interface EventListener extends BaseChunkSampleSourceEventListener {}

  /**
   * The default minimum number of times to retry loading data prior to failing.
   */
  public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 3;

  // Sentinel meaning "no seek/reset is pending".
  private static final long NO_RESET_PENDING = Long.MIN_VALUE;

  private static final int PRIMARY_TYPE_NONE = 0;
  private static final int PRIMARY_TYPE_TEXT = 1;
  private static final int PRIMARY_TYPE_AUDIO = 2;
  private static final int PRIMARY_TYPE_VIDEO = 3;

  private final HlsChunkSource chunkSource;
  // Extractors in loading order; samples are read from the head.
  private final LinkedList<HlsExtractorWrapper> extractors;
  private final int minLoadableRetryCount;
  private final int bufferSizeContribution;
  private final ChunkOperationHolder chunkOperationHolder;
  private final int eventSourceId;
  private final LoadControl loadControl;
  private final Handler eventHandler;
  private final EventListener eventListener;

  private int remainingReleaseCount;
  private boolean prepared;
  private boolean loadControlRegistered;
  private int trackCount;
  private int enabledTrackCount;

  private Format downstreamFormat;

  // Tracks are complicated in HLS. See documentation of buildTracks for details.
  // Indexed by track (as exposed by this source).
  private MediaFormat[] trackFormats;
  private boolean[] trackEnabledStates;
  private boolean[] pendingDiscontinuities;
  private MediaFormat[] downstreamMediaFormats;
  // Maps track index (as exposed by this source) to the corresponding chunk source track index for
  // primary tracks, or to -1 otherwise.
  private int[] chunkSourceTrackIndices;
  // Maps track index (as exposed by this source) to the corresponding extractor track index.
  private int[] extractorTrackIndices;
  // Indexed by extractor track index.
  private boolean[] extractorTrackEnabledStates;

  private long downstreamPositionUs;
  private long lastSeekPositionUs;
  private long pendingResetPositionUs;

  private boolean loadingFinished;
  private Chunk currentLoadable;
  private TsChunk currentTsLoadable;
  private TsChunk previousTsLoadable;

  private Loader loader;
  private IOException currentLoadableException;
  private int currentLoadableExceptionCount;
  private long currentLoadableExceptionTimestamp;
  private long currentLoadStartTimeMs;

  // Convenience constructor: no event dispatch, default retry count.
  public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
      int bufferSizeContribution) {
    this(chunkSource, loadControl, bufferSizeContribution, null, null, 0);
  }

  // Convenience constructor: default retry count.
  public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
      int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
      int eventSourceId) {
    this(chunkSource, loadControl, bufferSizeContribution, eventHandler, eventListener,
        eventSourceId, DEFAULT_MIN_LOADABLE_RETRY_COUNT);
  }

  public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
      int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
      int eventSourceId, int minLoadableRetryCount) {
    this.chunkSource = chunkSource;
    this.loadControl = loadControl;
    this.bufferSizeContribution = bufferSizeContribution;
    this.minLoadableRetryCount = minLoadableRetryCount;
    this.eventHandler = eventHandler;
    this.eventListener = eventListener;
    this.eventSourceId = eventSourceId;
    this.pendingResetPositionUs = NO_RESET_PENDING;
    extractors = new LinkedList<>();
    chunkOperationHolder = new ChunkOperationHolder();
  }

  @Override
  public SampleSourceReader register() {
    // Each registered consumer must later call release() once.
    remainingReleaseCount++;
    return this;
  }

  @Override
  public boolean prepare(long positionUs) {
    if (prepared) {
      return true;
    } else if (!chunkSource.prepare()) {
      return false;
    }
    if (!extractors.isEmpty()) {
      while (true) {
        // We're not prepared, but we might have loaded what we need.
        HlsExtractorWrapper extractor = extractors.getFirst();
        if (extractor.isPrepared()) {
          buildTracks(extractor);
          prepared = true;
          maybeStartLoading(); // Update the load control.
          return true;
        } else if (extractors.size() > 1) {
          // Head extractor can never become prepared; discard it and try the next.
          extractors.removeFirst().clear();
        } else {
          break;
        }
      }
    }
    // We're not prepared and we haven't loaded what we need.
    if (loader == null) {
      loader = new Loader("Loader:HLS");
      loadControl.register(this, bufferSizeContribution);
      loadControlRegistered = true;
    }
    if (!loader.isLoading()) {
      // We're going to have to start loading a chunk to get what we need for preparation. We should
      // attempt to load the chunk at positionUs, so that we'll already be loading the correct chunk
      // in the common case where the renderer is subsequently enabled at this position.
      pendingResetPositionUs = positionUs;
      downstreamPositionUs = positionUs;
    }
    maybeStartLoading();
    return false;
  }

  @Override
  public int getTrackCount() {
    Assertions.checkState(prepared);
    return trackCount;
  }

  @Override
  public MediaFormat getFormat(int track) {
    Assertions.checkState(prepared);
    return trackFormats[track];
  }

  @Override
  public void enable(int track, long positionUs) {
    Assertions.checkState(prepared);
    setTrackEnabledState(track, true);
    downstreamMediaFormats[track] = null;
    pendingDiscontinuities[track] = false;
    downstreamFormat = null;
    boolean wasLoadControlRegistered = loadControlRegistered;
    if (!loadControlRegistered) {
      loadControl.register(this, bufferSizeContribution);
      loadControlRegistered = true;
    }
    // Treat enabling of a live stream as occurring at t=0 in both of the blocks below.
    positionUs = chunkSource.isLive() ? 0 : positionUs;
    int chunkSourceTrack = chunkSourceTrackIndices[track];
    if (chunkSourceTrack != -1 && chunkSourceTrack != chunkSource.getSelectedTrackIndex()) {
      // This is a primary track whose corresponding chunk source track is different to the one
      // currently selected. We need to change the selection and restart. Since other exposed tracks
      // may be enabled too, we need to implement the restart as a seek so that all downstream
      // renderers receive a discontinuity event.
      chunkSource.selectTrack(chunkSourceTrack);
      seekToInternal(positionUs);
      return;
    }
    if (enabledTrackCount == 1) {
      lastSeekPositionUs = positionUs;
      if (wasLoadControlRegistered && downstreamPositionUs == positionUs) {
        // TODO: Address [Internal: b/21743989] to remove the need for this kind of hack.
        // This is the first track to be enabled after preparation and the position is the same as
        // was passed to prepare. In this case we can avoid restarting, which would reload the same
        // chunks as were loaded during preparation.
        maybeStartLoading();
      } else {
        downstreamPositionUs = positionUs;
        restartFrom(positionUs);
      }
    }
  }

  @Override
  public void disable(int track) {
    Assertions.checkState(prepared);
    setTrackEnabledState(track, false);
    if (enabledTrackCount == 0) {
      // Last enabled track went away: tear down buffering state.
      chunkSource.reset();
      downstreamPositionUs = Long.MIN_VALUE;
      if (loadControlRegistered) {
        loadControl.unregister(this);
        loadControlRegistered = false;
      }
      if (loader.isLoading()) {
        // State is cleared in onLoadCanceled once the cancel completes.
        loader.cancelLoading();
      } else {
        clearState();
        loadControl.trimAllocator();
      }
    }
  }

  @Override
  public boolean continueBuffering(int track, long playbackPositionUs) {
    Assertions.checkState(prepared);
    Assertions.checkState(trackEnabledStates[track]);
    downstreamPositionUs = playbackPositionUs;
    if (!extractors.isEmpty()) {
      discardSamplesForDisabledTracks(getCurrentExtractor(), downstreamPositionUs);
    }
    maybeStartLoading();
    if (loadingFinished) {
      return true;
    }
    if (isPendingReset() || extractors.isEmpty()) {
      return false;
    }
    // Buffered if any prepared extractor holds samples for this track.
    for (int extractorIndex = 0; extractorIndex < extractors.size(); extractorIndex++) {
      HlsExtractorWrapper extractor = extractors.get(extractorIndex);
      if (!extractor.isPrepared()) {
        break;
      }
      int extractorTrack = extractorTrackIndices[track];
      if (extractor.hasSamples(extractorTrack)) {
        return true;
      }
    }
    return false;
  }

  @Override
  public long readDiscontinuity(int track) {
    // Report (and clear) a pending discontinuity for this track, if any.
    if (pendingDiscontinuities[track]) {
      pendingDiscontinuities[track] = false;
      return lastSeekPositionUs;
    }
    return NO_DISCONTINUITY;
  }

  @Override
  public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
      SampleHolder sampleHolder) {
    Assertions.checkState(prepared);
    downstreamPositionUs = playbackPositionUs;
    if (pendingDiscontinuities[track] || isPendingReset()) {
      // Caller must consume the discontinuity / wait for the reset first.
      return NOTHING_READ;
    }
    HlsExtractorWrapper extractor = getCurrentExtractor();
    if (!extractor.isPrepared()) {
      return NOTHING_READ;
    }
    Format format = extractor.format;
    if (!format.equals(downstreamFormat)) {
      notifyDownstreamFormatChanged(format, extractor.trigger, extractor.startTimeUs);
    }
    downstreamFormat = format;
    if (extractors.size() > 1) {
      // If there's more than one extractor, attempt to configure a seamless splice from the
      // current one to the next one.
      extractor.configureSpliceTo(extractors.get(1));
    }
    int extractorTrack = extractorTrackIndices[track];
    int extractorIndex = 0;
    while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(extractorTrack)) {
      // We're finished reading from the extractor for this particular track, so advance to the
      // next one for the current read.
      extractor = extractors.get(++extractorIndex);
      if (!extractor.isPrepared()) {
        return NOTHING_READ;
      }
    }
    MediaFormat mediaFormat = extractor.getMediaFormat(extractorTrack);
    if (mediaFormat != null) {
      if (!mediaFormat.equals(downstreamMediaFormats[track])) {
        formatHolder.format = mediaFormat;
        downstreamMediaFormats[track] = mediaFormat;
        return FORMAT_READ;
      }
      // If mediaFormat and downstreamMediaFormat[track] are equal but different objects then the
      // equality check above will have been expensive, comparing the fields in each format. We
      // update downstreamMediaFormat here so that referential equality can be cheaply established
      // during subsequent calls.
      downstreamMediaFormats[track] = mediaFormat;
    }
    if (extractor.getSample(extractorTrack, sampleHolder)) {
      // Samples earlier than the last seek are decode-only (rendered but not shown).
      boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
      sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
      return SAMPLE_READ;
    }
    if (loadingFinished) {
      return END_OF_STREAM;
    }
    return NOTHING_READ;
  }

  @Override
  public void maybeThrowError() throws IOException {
    // Surface a persistent load error once the retry budget is exhausted.
    if (currentLoadableException != null && currentLoadableExceptionCount > minLoadableRetryCount) {
      throw currentLoadableException;
    } else if (currentLoadable == null) {
      chunkSource.maybeThrowError();
    }
  }

  @Override
  public void seekToUs(long positionUs) {
    Assertions.checkState(prepared);
    Assertions.checkState(enabledTrackCount > 0);
    // make sure we are not seeking behind the live window
    positionUs = chunkSource.limitToliveEdge(positionUs);
    // Ignore seeks to the current position.
    long currentPositionUs = isPendingReset() ? pendingResetPositionUs : downstreamPositionUs;
    downstreamPositionUs = positionUs;
    lastSeekPositionUs = positionUs;
    if (currentPositionUs == positionUs) {
      return;
    }
    seekToInternal(positionUs);
  }

  @Override
  public long getBufferedPositionUs() {
    Assertions.checkState(prepared);
    Assertions.checkState(enabledTrackCount > 0);
    if (isPendingReset()) {
      return pendingResetPositionUs;
    } else if (loadingFinished) {
      return TrackRenderer.END_OF_TRACK_US;
    } else {
      long largestParsedTimestampUs = extractors.getLast().getLargestParsedTimestampUs();
      if (extractors.size() > 1) {
        // When adapting from one format to the next, the penultimate extractor may have the largest
        // parsed timestamp (e.g. if the last extractor hasn't parsed any timestamps yet).
        largestParsedTimestampUs = Math.max(largestParsedTimestampUs,
            extractors.get(extractors.size() - 2).getLargestParsedTimestampUs());
      }
      // Long.MIN_VALUE means nothing has been parsed yet.
      return largestParsedTimestampUs == Long.MIN_VALUE ? downstreamPositionUs
          : largestParsedTimestampUs;
    }
  }

  @Override
  public void release() {
    Assertions.checkState(remainingReleaseCount > 0);
    // Only the final release actually tears down the loader.
    if (--remainingReleaseCount == 0 && loader != null) {
      if (loadControlRegistered) {
        loadControl.unregister(this);
        loadControlRegistered = false;
      }
      loader.release();
      loader = null;
    }
  }

  // Loader.Callback implementation.

  @Override
  public void onLoadCompleted(Loadable loadable) {
    Assertions.checkState(loadable == currentLoadable);
    long now = SystemClock.elapsedRealtime();
    long loadDurationMs = now - currentLoadStartTimeMs;
    chunkSource.onChunkLoadCompleted(currentLoadable);
    if (isTsChunk(currentLoadable)) {
      Assertions.checkState(currentLoadable == currentTsLoadable);
      previousTsLoadable = currentTsLoadable;
      notifyLoadCompleted(currentLoadable.bytesLoaded(), currentTsLoadable.type,
          currentTsLoadable.trigger, currentTsLoadable.format, currentTsLoadable.startTimeUs,
          currentTsLoadable.endTimeUs, now, loadDurationMs);
    } else {
      // Non-media chunk (e.g. playlist/key): no timestamps to report.
      notifyLoadCompleted(currentLoadable.bytesLoaded(), currentLoadable.type,
          currentLoadable.trigger, currentLoadable.format, -1, -1, now, loadDurationMs);
    }
    clearCurrentLoadable();
    maybeStartLoading();
  }

  @Override
  public void onLoadCanceled(Loadable loadable) {
    notifyLoadCanceled(currentLoadable.bytesLoaded());
    if (enabledTrackCount > 0) {
      // Cancel was triggered by a seek: restart from the pending position.
      restartFrom(pendingResetPositionUs);
    } else {
      // Cancel was triggered by disabling the last track: tear down.
      clearState();
      loadControl.trimAllocator();
    }
  }

  @Override
  public void onLoadError(Loadable loadable, IOException e) {
    if (chunkSource.onChunkLoadError(currentLoadable, e)) {
      // Error handled by source.
      if (previousTsLoadable == null && !isPendingReset()) {
        pendingResetPositionUs = lastSeekPositionUs;
      }
      clearCurrentLoadable();
    } else {
      // Record the error; maybeThrowError() surfaces it after retries run out.
      currentLoadableException = e;
      currentLoadableExceptionCount++;
      currentLoadableExceptionTimestamp = SystemClock.elapsedRealtime();
    }
    notifyLoadError(e);
    maybeStartLoading();
  }

  // Internal stuff.
/** * Builds tracks that are exposed by this {@link HlsSampleSource} instance, as well as internal * data-structures required for operation. * <p> * Tracks in HLS are complicated. A HLS master playlist contains a number of "variants". Each * variant stream typically contains muxed video, audio and (possibly) additional audio, metadata * and caption tracks. We wish to allow the user to select between an adaptive track that spans * all variants, as well as each individual variant. If multiple audio tracks are present within * each variant then we wish to allow the user to select between those also. * <p> * To do this, tracks are constructed as follows. The {@link HlsChunkSource} exposes (N+1) tracks, * where N is the number of variants defined in the HLS master playlist. These consist of one * adaptive track defined to span all variants and a track for each individual variant. The * adaptive track is initially selected. The extractor is then prepared to discover the tracks * inside of each variant stream. The two sets of tracks are then combined by this method to * create a third set, which is the set exposed by this {@link HlsSampleSource}: * <ul> * <li>The extractor tracks are inspected to infer a "primary" track type. If a video track is * present then it is always the primary type. If not, audio is the primary type if present. * Else text is the primary type if present. Else there is no primary type.</li> * <li>If there is exactly one extractor track of the primary type, it's expanded into (N+1) * exposed tracks, all of which correspond to the primary extractor track and each of which * corresponds to a different chunk source track. Selecting one of these tracks has the effect * of switching the selected track on the chunk source.</li> * <li>All other extractor tracks are exposed directly. 
Selecting one of these tracks has the * effect of selecting an extractor track, leaving the selected track on the chunk source * unchanged.</li> * </ul> * * @param extractor The prepared extractor. */ private void buildTracks(HlsExtractorWrapper extractor) { // Iterate through the extractor tracks to discover the "primary" track type, and the index // of the single track of this type. int primaryExtractorTrackType = PRIMARY_TYPE_NONE; int primaryExtractorTrackIndex = -1; int extractorTrackCount = extractor.getTrackCount(); for (int i = 0; i < extractorTrackCount; i++) { String mimeType = extractor.getMediaFormat(i).mimeType; int trackType; if (MimeTypes.isVideo(mimeType)) { trackType = PRIMARY_TYPE_VIDEO; } else if (MimeTypes.isAudio(mimeType)) { trackType = PRIMARY_TYPE_AUDIO; } else if (MimeTypes.isText(mimeType)) { trackType = PRIMARY_TYPE_TEXT; } else { trackType = PRIMARY_TYPE_NONE; } if (trackType > primaryExtractorTrackType) { primaryExtractorTrackType = trackType; primaryExtractorTrackIndex = i; } else if (trackType == primaryExtractorTrackType && primaryExtractorTrackIndex != -1) { // We have multiple tracks of the primary type. We only want an index if there only // exists a single track of the primary type, so set the index back to -1. primaryExtractorTrackIndex = -1; } } // Calculate the number of tracks that will be exposed. int chunkSourceTrackCount = chunkSource.getTrackCount(); boolean expandPrimaryExtractorTrack = primaryExtractorTrackIndex != -1; trackCount = extractorTrackCount; if (expandPrimaryExtractorTrack) { trackCount += chunkSourceTrackCount - 1; } // Instantiate the necessary internal data-structures. 
trackFormats = new MediaFormat[trackCount]; trackEnabledStates = new boolean[trackCount]; pendingDiscontinuities = new boolean[trackCount]; downstreamMediaFormats = new MediaFormat[trackCount]; chunkSourceTrackIndices = new int[trackCount]; extractorTrackIndices = new int[trackCount]; extractorTrackEnabledStates = new boolean[extractorTrackCount]; // Construct the set of exposed tracks. long durationUs = chunkSource.getDurationUs(); int trackIndex = 0; for (int i = 0; i < extractorTrackCount; i++) { MediaFormat format = extractor.getMediaFormat(i).copyWithDurationUs(durationUs); String language = null; if (MimeTypes.isAudio(format.mimeType)) { language = chunkSource.getMuxedAudioLanguage(); } else if (MimeTypes.APPLICATION_EIA608.equals(format.mimeType)) { language = chunkSource.getMuxedCaptionLanguage(); } if (i == primaryExtractorTrackIndex) { for (int j = 0; j < chunkSourceTrackCount; j++) { extractorTrackIndices[trackIndex] = i; chunkSourceTrackIndices[trackIndex] = j; Variant fixedTrackVariant = chunkSource.getFixedTrackVariant(j); trackFormats[trackIndex++] = fixedTrackVariant == null ? format.copyAsAdaptive(null) : copyWithFixedTrackInfo(format, fixedTrackVariant.format, language); } } else { extractorTrackIndices[trackIndex] = i; chunkSourceTrackIndices[trackIndex] = -1; trackFormats[trackIndex++] = format.copyWithLanguage(language); } } } /** * Enables or disables the track at a given index. * * @param track The index of the track. * @param enabledState True if the track is being enabled, or false if it's being disabled. */ private void setTrackEnabledState(int track, boolean enabledState) { Assertions.checkState(trackEnabledStates[track] != enabledState); int extractorTrack = extractorTrackIndices[track]; Assertions.checkState(extractorTrackEnabledStates[extractorTrack] != enabledState); trackEnabledStates[track] = enabledState; extractorTrackEnabledStates[extractorTrack] = enabledState; enabledTrackCount = enabledTrackCount + (enabledState ? 
1 : -1); } /** * Copies a provided {@link MediaFormat}, incorporating information from the {@link Format} of * a fixed (i.e. non-adaptive) track, as well as a language. * * @param format The {@link MediaFormat} to copy. * @param fixedTrackFormat The {@link Format} to incorporate into the copy. * @param language The language to incorporate into the copy. * @return The copied {@link MediaFormat}. */ private static MediaFormat copyWithFixedTrackInfo(MediaFormat format, Format fixedTrackFormat, String language) { int width = fixedTrackFormat.width == -1 ? MediaFormat.NO_VALUE : fixedTrackFormat.width; int height = fixedTrackFormat.height == -1 ? MediaFormat.NO_VALUE : fixedTrackFormat.height; return format.copyWithFixedTrackInfo(fixedTrackFormat.id, fixedTrackFormat.bitrate, width, height, language); } /** * Performs a seek. The operation is performed even if the seek is to the current position. * * @param positionUs The position to seek to. */ private void seekToInternal(long positionUs) { lastSeekPositionUs = positionUs; downstreamPositionUs = positionUs; Arrays.fill(pendingDiscontinuities, true); chunkSource.seek(); restartFrom(positionUs); } /** * Gets the current extractor from which samples should be read. * <p> * Calling this method discards extractors without any samples from the front of the queue. The * last extractor is retained even if it doesn't have any samples. * <p> * This method must not be called unless {@link #extractors} is non-empty. * * @return The current extractor from which samples should be read. Guaranteed to be non-null. */ private HlsExtractorWrapper getCurrentExtractor() { HlsExtractorWrapper extractor = extractors.getFirst(); while (extractors.size() > 1 && !haveSamplesForEnabledTracks(extractor)) { // We're finished reading from the extractor for all tracks, and so can discard it. 
extractors.removeFirst().clear(); extractor = extractors.getFirst(); } return extractor; } private void discardSamplesForDisabledTracks(HlsExtractorWrapper extractor, long timeUs) { if (!extractor.isPrepared()) { return; } for (int i = 0; i < extractorTrackEnabledStates.length; i++) { if (!extractorTrackEnabledStates[i]) { extractor.discardUntil(i, timeUs); } } } private boolean haveSamplesForEnabledTracks(HlsExtractorWrapper extractor) { if (!extractor.isPrepared()) { return false; } for (int i = 0; i < extractorTrackEnabledStates.length; i++) { if (extractorTrackEnabledStates[i] && extractor.hasSamples(i)) { return true; } } return false; } private void restartFrom(long positionUs) { pendingResetPositionUs = positionUs; loadingFinished = false; if (loader.isLoading()) { loader.cancelLoading(); } else { clearState(); maybeStartLoading(); } } private void clearState() { for (int i = 0; i < extractors.size(); i++) { extractors.get(i).clear(); } extractors.clear(); clearCurrentLoadable(); previousTsLoadable = null; } private void clearCurrentLoadable() { currentTsLoadable = null; currentLoadable = null; currentLoadableException = null; currentLoadableExceptionCount = 0; } private void maybeStartLoading() { long now = SystemClock.elapsedRealtime(); long nextLoadPositionUs = getNextLoadPositionUs(); boolean isBackedOff = currentLoadableException != null; boolean loadingOrBackedOff = loader.isLoading() || isBackedOff; // Update the control with our current state, and determine whether we're the next loader. 
boolean nextLoader = loadControl.update(this, downstreamPositionUs, nextLoadPositionUs, loadingOrBackedOff); if (isBackedOff) { long elapsedMillis = now - currentLoadableExceptionTimestamp; if (elapsedMillis >= getRetryDelayMillis(currentLoadableExceptionCount)) { currentLoadableException = null; loader.startLoading(currentLoadable, this); } return; } if (loader.isLoading() || !nextLoader || (prepared && enabledTrackCount == 0)) { return; } chunkSource.getChunkOperation(previousTsLoadable, pendingResetPositionUs != NO_RESET_PENDING ? pendingResetPositionUs : downstreamPositionUs, chunkOperationHolder); boolean endOfStream = chunkOperationHolder.endOfStream; Chunk nextLoadable = chunkOperationHolder.chunk; chunkOperationHolder.clear(); if (endOfStream) { loadingFinished = true; loadControl.update(this, downstreamPositionUs, -1, false); return; } if (nextLoadable == null) { return; } currentLoadStartTimeMs = now; currentLoadable = nextLoadable; if (isTsChunk(currentLoadable)) { TsChunk tsChunk = (TsChunk) currentLoadable; if (isPendingReset()) { pendingResetPositionUs = NO_RESET_PENDING; } HlsExtractorWrapper extractorWrapper = tsChunk.extractorWrapper; if (extractors.isEmpty() || extractors.getLast() != extractorWrapper) { extractorWrapper.init(loadControl.getAllocator()); extractors.addLast(extractorWrapper); } notifyLoadStarted(tsChunk.dataSpec.length, tsChunk.type, tsChunk.trigger, tsChunk.format, tsChunk.startTimeUs, tsChunk.endTimeUs); currentTsLoadable = tsChunk; } else { notifyLoadStarted(currentLoadable.dataSpec.length, currentLoadable.type, currentLoadable.trigger, currentLoadable.format, -1, -1); } loader.startLoading(currentLoadable, this); } /** * Gets the next load time, assuming that the next load starts where the previous chunk ended (or * from the pending reset time, if there is one). 
*/ private long getNextLoadPositionUs() { if (isPendingReset()) { return pendingResetPositionUs; } else { return loadingFinished || (prepared && enabledTrackCount == 0) ? -1 : currentTsLoadable != null ? currentTsLoadable.endTimeUs : previousTsLoadable.endTimeUs; } } private boolean isTsChunk(Chunk chunk) { return chunk instanceof TsChunk; } private boolean isPendingReset() { return pendingResetPositionUs != NO_RESET_PENDING; } private long getRetryDelayMillis(long errorCount) { return Math.min((errorCount - 1) * 1000, 5000); } /* package */ long usToMs(long timeUs) { return timeUs / 1000; } private void notifyLoadStarted(final long length, final int type, final int trigger, final Format format, final long mediaStartTimeUs, final long mediaEndTimeUs) { if (eventHandler != null && eventListener != null) { eventHandler.post(new Runnable() { @Override public void run() { eventListener.onLoadStarted(eventSourceId, length, type, trigger, format, usToMs(mediaStartTimeUs), usToMs(mediaEndTimeUs)); } }); } } private void notifyLoadCompleted(final long bytesLoaded, final int type, final int trigger, final Format format, final long mediaStartTimeUs, final long mediaEndTimeUs, final long elapsedRealtimeMs, final long loadDurationMs) { if (eventHandler != null && eventListener != null) { eventHandler.post(new Runnable() { @Override public void run() { eventListener.onLoadCompleted(eventSourceId, bytesLoaded, type, trigger, format, usToMs(mediaStartTimeUs), usToMs(mediaEndTimeUs), elapsedRealtimeMs, loadDurationMs); } }); } } private void notifyLoadCanceled(final long bytesLoaded) { if (eventHandler != null && eventListener != null) { eventHandler.post(new Runnable() { @Override public void run() { eventListener.onLoadCanceled(eventSourceId, bytesLoaded); } }); } } private void notifyLoadError(final IOException e) { if (eventHandler != null && eventListener != null) { eventHandler.post(new Runnable() { @Override public void run() { eventListener.onLoadError(eventSourceId, e); } 
}); } } private void notifyDownstreamFormatChanged(final Format format, final int trigger, final long positionUs) { if (eventHandler != null && eventListener != null) { eventHandler.post(new Runnable() { @Override public void run() { eventListener.onDownstreamFormatChanged(eventSourceId, format, trigger, usToMs(positionUs)); } }); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.security; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.security.Principal; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.geode.DataSerializable; import org.apache.geode.DataSerializer; import org.apache.geode.Instantiator; import org.apache.geode.cache.Cache; import org.apache.geode.cache.operations.GetOperationContext; import org.apache.geode.cache.operations.OperationContext; import org.apache.geode.cache.operations.OperationContext.OperationCode; import org.apache.geode.cache.operations.PutOperationContext; import org.apache.geode.cache.operations.QueryOperationContext; import org.apache.geode.cache.query.SelectResults; import org.apache.geode.cache.query.internal.CqEntry; import org.apache.geode.cache.query.internal.ResultsCollectionWrapper; import org.apache.geode.cache.query.types.ObjectType; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.i18n.LogWriterI18n; import org.apache.geode.internal.HeapDataOutputStream; import org.apache.geode.internal.Version; import org.apache.geode.internal.i18n.LocalizedStrings; /** * An authorization implementation for testing 
that checks for authorization information in * post-operation filtering, removes that field and allows the operation only if the authorization * field in {@link ObjectWithAuthz} object allows the current principal. * * @since GemFire 5.5 */ public class FilterPostAuthorization implements AccessControl { private String principalName; private LogWriterI18n logger; static { Instantiator.register(new Instantiator(ObjectWithAuthz.class, ObjectWithAuthz.CLASSID) { @Override public DataSerializable newInstance() { return new ObjectWithAuthz(); } }, false); } public FilterPostAuthorization() { this.principalName = null; this.logger = null; } public static AccessControl create() { return new FilterPostAuthorization(); } public void init(Principal principal, DistributedMember remoteMember, Cache cache) throws NotAuthorizedException { this.principalName = (principal == null ? "" : principal.getName()); this.logger = cache.getSecurityLoggerI18n(); } private byte[] checkObjectAuth(byte[] serializedObj, boolean isObject) { if (!isObject) { return null; } ByteArrayInputStream bis = new ByteArrayInputStream(serializedObj); DataInputStream dis = new DataInputStream(bis); Object obj; try { obj = DataSerializer.readObject(dis); if (this.logger.finerEnabled()) { this.logger.finer("FilterPostAuthorization: successfully read object " + "from serialized object: " + obj); } } catch (Exception ex) { this.logger.severe( LocalizedStrings.FilterPostAuthorization_FILTERPOSTAUTHORIZATION_AN_EXCEPTION_WAS_THROWN_WHILE_TRYING_TO_DESERIALIZE, ex); return null; } obj = checkObjectAuth(obj); if (obj != null) { HeapDataOutputStream hos = new HeapDataOutputStream(serializedObj.length + 32, Version.CURRENT); try { DataSerializer.writeObject(obj, hos); return hos.toByteArray(); } catch (Exception ex) { this.logger.severe( LocalizedStrings.FilterPostAuthorization_FILTERPOSTAUTHORIZATION_AN_EXCEPTION_WAS_THROWN_WHILE_TRYING_TO_SERIALIZE, ex); } } return null; } private Object checkObjectAuth(Object value) 
{ Object obj = value; if (value instanceof CqEntry) { obj = ((CqEntry) value).getValue(); } if (obj instanceof ObjectWithAuthz) { int lastChar = this.principalName.charAt(this.principalName.length() - 1) - '0'; lastChar %= 10; ObjectWithAuthz authzObj = (ObjectWithAuthz) obj; int authzIndex = ((Integer) authzObj.getAuthz()).intValue() - '0'; authzIndex %= 10; if ((lastChar == 0) || (authzIndex % lastChar != 0)) { this.logger.warning( LocalizedStrings.FilterPostAuthorization_FILTERPOSTAUTHORIZATION_THE_USER_0_IS_NOT_AUTHORIZED_FOR_THE_OBJECT_1, new Object[] {this.principalName, authzObj.getVal()}); return null; } else { if (this.logger.fineEnabled()) { this.logger.fine("FilterPostAuthorization: user [" + this.principalName + "] authorized for object: " + authzObj.getVal()); } if (value instanceof CqEntry) { return new CqEntry(((CqEntry) value).getKey(), authzObj.getVal()); } else { return authzObj.getVal(); } } } this.logger.warning( LocalizedStrings.FilterPostAuthorization_FILTERPOSTAUTHORIZATION_THE_OBJECT_OF_TYPE_0_IS_NOT_AN_INSTANCE_OF_1, new Object[] {obj.getClass(), ObjectWithAuthz.class}); return null; } public boolean authorizeOperation(String regionName, OperationContext context) { assert context.isPostOperation(); OperationCode opCode = context.getOperationCode(); if (opCode.isGet()) { GetOperationContext getContext = (GetOperationContext) context; Object value = getContext.getObject(); boolean isObject = getContext.isObject(); if (value != null) { if ((value = checkObjectAuth(value)) != null) { getContext.setObject(value, isObject); return true; } } else { byte[] serializedValue = getContext.getSerializedValue(); if ((serializedValue = checkObjectAuth(serializedValue, isObject)) != null) { getContext.setSerializedValue(serializedValue, isObject); return true; } } } else if (opCode.isPut()) { PutOperationContext putContext = (PutOperationContext) context; byte[] serializedValue = putContext.getSerializedValue(); boolean isObject = putContext.isObject(); if 
((serializedValue = checkObjectAuth(serializedValue, isObject)) != null) { putContext.setSerializedValue(serializedValue, isObject); return true; } } else if (opCode.equals(OperationCode.PUTALL)) { // no need for now } else if (opCode.isQuery() || opCode.isExecuteCQ()) { QueryOperationContext queryContext = (QueryOperationContext) context; Object value = queryContext.getQueryResult(); if (value instanceof SelectResults) { SelectResults results = (SelectResults) value; List newResults = new ArrayList(); Iterator resultIter = results.iterator(); while (resultIter.hasNext()) { Object obj = resultIter.next(); if ((obj = checkObjectAuth(obj)) != null) { newResults.add(obj); } } if (results.isModifiable()) { results.clear(); results.addAll(newResults); } else { ObjectType constraint = results.getCollectionType().getElementType(); results = new ResultsCollectionWrapper(constraint, newResults); queryContext.setQueryResult(results); } return true; } else { return false; } } return false; } public void close() { this.principalName = null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.trogdor.coordinator; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.utils.Scheduler; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.trogdor.common.Node; import org.apache.kafka.trogdor.common.Platform; import org.apache.kafka.trogdor.common.ThreadUtils; import org.apache.kafka.trogdor.rest.TaskDone; import org.apache.kafka.trogdor.rest.TaskPending; import org.apache.kafka.trogdor.rest.TaskRunning; import org.apache.kafka.trogdor.rest.TaskState; import org.apache.kafka.trogdor.rest.TaskStopping; import org.apache.kafka.trogdor.rest.TasksResponse; import org.apache.kafka.trogdor.task.TaskController; import org.apache.kafka.trogdor.task.TaskSpec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** * The TaskManager is responsible for managing tasks inside the Trogdor coordinator. * * The task manager has a single thread, managed by the executor. We start, stop, * and handle state changes to tasks by adding requests to the executor queue. * Because the executor is single threaded, no locks are needed when accessing * TaskManager data structures. * * The TaskManager maintains a state machine for each task. Tasks begin in the * PENDING state, waiting for their designated start time to arrive. * When their time arrives, they transition to the RUNNING state. In this state, * the NodeManager will start them, and monitor them. * * The TaskManager does not handle communication with the agents. This is handled * by the NodeManagers. There is one NodeManager per node being managed. * See {org.apache.kafka.trogdor.coordinator.NodeManager} for details. */ public final class TaskManager { private static final Logger log = LoggerFactory.getLogger(TaskManager.class); /** * The platform. */ private final Platform platform; /** * The scheduler to use for this coordinator. */ private final Scheduler scheduler; /** * The clock to use for this coordinator. */ private final Time time; /** * A map of task IDs to Task objects. */ private final Map<String, ManagedTask> tasks; /** * The executor used for handling Task state changes. */ private final ScheduledExecutorService executor; /** * Maps node names to node managers. */ private final Map<String, NodeManager> nodeManagers; /** * True if the TaskManager is shut down. 
*/ private AtomicBoolean shutdown = new AtomicBoolean(false); TaskManager(Platform platform, Scheduler scheduler) { this.platform = platform; this.scheduler = scheduler; this.time = scheduler.time(); this.tasks = new HashMap<>(); this.executor = Executors.newSingleThreadScheduledExecutor( ThreadUtils.createThreadFactory("TaskManagerStateThread", false)); this.nodeManagers = new HashMap<>(); for (Node node : platform.topology().nodes().values()) { if (Node.Util.getTrogdorAgentPort(node) > 0) { this.nodeManagers.put(node.name(), new NodeManager(node, this)); } } log.info("Created TaskManager for agent(s) on: {}", Utils.join(nodeManagers.keySet(), ", ")); } enum ManagedTaskState { PENDING, RUNNING, STOPPING, DONE; } class ManagedTask { /** * The task id. */ final private String id; /** * The task specification. */ final private TaskSpec spec; /** * The task controller. */ final private TaskController controller; /** * The task state. */ private ManagedTaskState state; /** * The time when the task was started, or -1 if the task has not been started. */ private long startedMs = -1; /** * The time when the task was finished, or -1 if the task has not been finished. */ private long doneMs = -1; /** * True if the task was cancelled by a stop request. */ boolean cancelled = false; /** * If there is a task start scheduled, this is a future which can * be used to cancel it. */ private Future<?> startFuture = null; /** * The name of the worker nodes involved with this task. * Null if the task is not running. */ private Set<String> workers = null; /** * The names of the worker nodes which are still running this task. * Null if the task is not running. */ private Set<String> activeWorkers = null; /** * If this is non-empty, a message describing how this task failed. 
*/ private String error = ""; ManagedTask(String id, TaskSpec spec, TaskController controller, ManagedTaskState state) { this.id = id; this.spec = spec; this.controller = controller; this.state = state; } void clearStartFuture() { if (startFuture != null) { startFuture.cancel(false); startFuture = null; } } long startDelayMs(long now) { if (now > spec.startMs()) { return 0; } return spec.startMs() - now; } TreeSet<String> findNodeNames() { Set<String> nodeNames = controller.targetNodes(platform.topology()); TreeSet<String> validNodeNames = new TreeSet<>(); TreeSet<String> nonExistentNodeNames = new TreeSet<>(); for (String nodeName : nodeNames) { if (nodeManagers.containsKey(nodeName)) { validNodeNames.add(nodeName); } else { nonExistentNodeNames.add(nodeName); } } if (!nonExistentNodeNames.isEmpty()) { throw new KafkaException("Unknown node names: " + Utils.join(nonExistentNodeNames, ", ")); } if (validNodeNames.isEmpty()) { throw new KafkaException("No node names specified."); } return validNodeNames; } void maybeSetError(String newError) { if (error.isEmpty()) { error = newError; } } TaskState taskState() { switch (state) { case PENDING: return new TaskPending(spec); case RUNNING: return new TaskRunning(spec, startedMs); case STOPPING: return new TaskStopping(spec, startedMs); case DONE: return new TaskDone(spec, startedMs, doneMs, error, cancelled); } throw new RuntimeException("unreachable"); } } /** * Create a task. * * @param id The ID of the task to create. * @param spec The specification of the task to create. * * @return The specification of the task with the given ID. * Note that if there was already a task with the given ID, * this may be different from the specification that was * requested. 
*/ public TaskSpec createTask(final String id, TaskSpec spec) throws ExecutionException, InterruptedException { final TaskSpec existingSpec = executor.submit(new CreateTask(id, spec)).get(); if (existingSpec != null) { log.info("Ignoring request to create task {}, because there is already " + "a task with that id.", id); return existingSpec; } return spec; } /** * Handles a request to create a new task. Processed by the state change thread. */ class CreateTask implements Callable<TaskSpec> { private final String id; private final TaskSpec spec; CreateTask(String id, TaskSpec spec) { this.id = id; this.spec = spec; } @Override public TaskSpec call() throws Exception { ManagedTask task = tasks.get(id); if (task != null) { log.info("Task ID {} is already in use.", id); return task.spec; } TaskController controller = null; String failure = null; try { controller = spec.newController(id); } catch (Throwable t) { failure = "Failed to create TaskController: " + t.getMessage(); } if (failure != null) { log.info("Failed to create a new task {} with spec {}: {}", id, spec, failure); task = new ManagedTask(id, spec, null, ManagedTaskState.DONE); task.doneMs = time.milliseconds(); task.maybeSetError(failure); tasks.put(id, task); return null; } task = new ManagedTask(id, spec, controller, ManagedTaskState.PENDING); tasks.put(id, task); long delayMs = task.startDelayMs(time.milliseconds()); task.startFuture = scheduler.schedule(executor, new RunTask(task), delayMs); log.info("Created a new task {} with spec {}, scheduled to start {} ms from now.", id, spec, delayMs); return null; } } /** * Handles starting a task. Processed by the state change thread. 
*/
class RunTask implements Callable<Void> {
  private final ManagedTask task;

  RunTask(ManagedTask task) {
    this.task = task;
  }

  @Override
  public Void call() throws Exception {
    task.clearStartFuture();
    if (task.state != ManagedTaskState.PENDING) {
      // The task may have been cancelled while its start was still scheduled.
      log.info("Can't start task {}, because it is already in state {}.",
          task.id, task.state);
      return null;
    }
    TreeSet<String> nodeNames;
    try {
      nodeNames = task.findNodeNames();
    } catch (Exception e) {
      // No usable nodes: transition straight to DONE with an error.
      log.error("Unable to find nodes for task {}", task.id, e);
      task.doneMs = time.milliseconds();
      task.state = ManagedTaskState.DONE;
      task.maybeSetError("Unable to find nodes for task: " + e.getMessage());
      return null;
    }
    log.info("Running task {} on node(s): {}", task.id,
        Utils.join(nodeNames, ", "));
    task.state = ManagedTaskState.RUNNING;
    task.startedMs = time.milliseconds();
    task.workers = nodeNames;
    task.activeWorkers = new HashSet<>();
    // Fan the task out to one worker per target node.
    for (String workerName : task.workers) {
      task.activeWorkers.add(workerName);
      nodeManagers.get(workerName).createWorker(task.id, task.spec);
    }
    return null;
  }
}

/**
 * Stop a task.
 *
 * @param id          The ID of the task to stop.
 * @return            The specification of the task which was stopped, or null if there
 *                    was no task found with the given ID.
 */
public TaskSpec stopTask(final String id) throws ExecutionException, InterruptedException {
  // Cancellation is also serialized through the state change thread.
  final TaskSpec spec = executor.submit(new CancelTask(id)).get();
  return spec;
}

/**
 * Handles cancelling a task.  Processed by the state change thread.
*/
class CancelTask implements Callable<TaskSpec> {
  private final String id;

  CancelTask(String id) {
    this.id = id;
  }

  @Override
  public TaskSpec call() throws Exception {
    ManagedTask task = tasks.get(id);
    if (task == null) {
      log.info("Can't cancel non-existent task {}.", id);
      return null;
    }
    switch (task.state) {
      case PENDING:
        // Never started: cancel the scheduled start and mark done immediately.
        task.cancelled = true;
        task.clearStartFuture();
        task.doneMs = time.milliseconds();
        task.state = ManagedTaskState.DONE;
        log.info("Stopped pending task {}.", id);
        break;
      case RUNNING:
        task.cancelled = true;
        if (task.activeWorkers.size() == 0) {
          log.info("Task {} is now complete with error: {}", id, task.error);
          task.doneMs = time.milliseconds();
          task.state = ManagedTaskState.DONE;
        } else {
          // Ask every still-active worker to stop; worker-completion
          // callbacks will move the task to DONE once the last one reports.
          for (String workerName : task.activeWorkers) {
            nodeManagers.get(workerName).stopWorker(id);
          }
          log.info("Cancelling task {} on worker(s): {}", id,
              Utils.join(task.activeWorkers, ", "));
          task.state = ManagedTaskState.STOPPING;
        }
        break;
      case STOPPING:
        log.info("Can't cancel task {} because it is already stopping.", id);
        break;
      case DONE:
        log.info("Can't cancel task {} because it is already done.", id);
        break;
    }
    return task.spec;
  }
}

/**
 * A callback NodeManager makes to indicate that a worker has completed.
 * The task will transition to DONE once all workers are done.
 *
 * @param nodeName    The node name.
 * @param id          The worker name.
 * @param error       An empty string if there is no error, or an error string.
*/ public void handleWorkerCompletion(String nodeName, String id, String error) { executor.submit(new HandleWorkerCompletion(nodeName, id, error)); } class HandleWorkerCompletion implements Callable<Void> { private final String nodeName; private final String id; private final String error; HandleWorkerCompletion(String nodeName, String id, String error) { this.nodeName = nodeName; this.id = id; this.error = error; } @Override public Void call() throws Exception { ManagedTask task = tasks.get(id); if (task == null) { log.error("Can't handle completion of unknown worker {} on node {}", id, nodeName); return null; } if ((task.state == ManagedTaskState.PENDING) || (task.state == ManagedTaskState.DONE)) { log.error("Task {} got unexpected worker completion from {} while " + "in {} state.", id, nodeName, task.state); return null; } boolean broadcastStop = false; if (task.state == ManagedTaskState.RUNNING) { task.state = ManagedTaskState.STOPPING; broadcastStop = true; } task.maybeSetError(error); task.activeWorkers.remove(nodeName); if (task.activeWorkers.size() == 0) { task.doneMs = time.milliseconds(); task.state = ManagedTaskState.DONE; log.info("Task {} is now complete on {} with error: {}", id, Utils.join(task.workers, ", "), task.error.isEmpty() ? "(none)" : task.error); } else if (broadcastStop) { log.info("Node {} stopped. Stopping task {} on worker(s): {}", id, Utils.join(task.activeWorkers, ", ")); for (String workerName : task.activeWorkers) { nodeManagers.get(workerName).stopWorker(id); } } return null; } } /** * Get information about the tasks being managed. 
*/
public TasksResponse tasks() throws ExecutionException, InterruptedException {
  return executor.submit(new GetTasksResponse()).get();
}

/**
 * Builds a snapshot of every managed task's state, keyed and sorted by task
 * ID.  Processed by the state change thread.
 */
class GetTasksResponse implements Callable<TasksResponse> {
  @Override
  public TasksResponse call() throws Exception {
    TreeMap<String, TaskState> states = new TreeMap<>();
    for (ManagedTask task : tasks.values()) {
      states.put(task.id, task.taskState());
    }
    return new TasksResponse(states);
  }
}

/**
 * Initiate shutdown, but do not wait for it to complete.
 */
public void beginShutdown(boolean stopAgents) throws ExecutionException, InterruptedException {
  // compareAndSet makes shutdown idempotent: only the first caller submits.
  if (shutdown.compareAndSet(false, true)) {
    executor.submit(new Shutdown(stopAgents));
  }
}

/**
 * Wait for shutdown to complete.  May be called prior to beginShutdown.
 */
public void waitForShutdown() throws ExecutionException, InterruptedException {
  // The executor terminates only after the Shutdown callable calls
  // executor.shutdown(); poll (with an effectively unbounded timeout) until then.
  while (!executor.awaitTermination(1, TimeUnit.DAYS)) { }
}

/**
 * Shuts down the node managers and then this manager's executor.
 * Processed by the state change thread.
 */
class Shutdown implements Callable<Void> {
  // Whether to also shut down the remote agents, not just this manager.
  private final boolean stopAgents;

  Shutdown(boolean stopAgents) {
    this.stopAgents = stopAgents;
  }

  @Override
  public Void call() throws Exception {
    log.info("Shutting down TaskManager{}.", stopAgents ? " and agents" : "");
    // Begin every shutdown before waiting on any, so that the node managers
    // shut down in parallel rather than one at a time.
    for (NodeManager nodeManager : nodeManagers.values()) {
      nodeManager.beginShutdown(stopAgents);
    }
    for (NodeManager nodeManager : nodeManagers.values()) {
      nodeManager.waitForShutdown();
    }
    executor.shutdown();
    return null;
  }
}
};
/**
 * Copyright 2013 Twitter, Inc.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twitter.crunch;

import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * A mapping function that computes a replica-distribution-factor (RDF) map
 * which stays as close as possible to a previously computed map
 * ({@code oldRdfMap}), then delegates data placement to RDFCRUSHMapping.
 */
public class StableRdfMapping implements MappingFunction {
  private static final Logger logger = LoggerFactory.getLogger(StableRdfMapping.class);

  private final int rdf;
  private final int rf;
  private final PlacementRules rules;
  // Previous primary-name -> replica-names map that the new map should track.
  private final Map<String, List<String>> oldRdfMap;
  private final int rdfMin;
  private final int rdfMax;
  private final double targetBalance;
  // Max number of rule conflicts tolerated before two nodes may not pair up.
  private final int rackDiversity;
  // Optional restriction of which replicas each node may newly pair with;
  // null means unrestricted.  NOTE(review): assumed keyed by node name — confirm.
  private final Map<String, List<String>> migrationMap;
  private Map<Node, List<Node>> newRdfMap;
  private final boolean trackCapacity;
  // Per-node fraction of total weight (only populated when trackCapacity).
  private Map<String, Double> replicaCapacity = new HashMap<String, Double>();
  // replica name -> (primary name -> fraction of that replica consumed).
  private Map<String, Map<String, Double>> replicaUsage = new HashMap<String, Map<String, Double>>();
  private final Crunch cruncher = new Crunch();

  public StableRdfMapping(int rdf, int rf, PlacementRules rules,
      Map<String, List<String>> oldRdfMap, int rdfMin, int rdfMax,
      double targetBalance, int rackDiversity, boolean trackCapacity,
      Map<String, List<String>> migrationMap) {
    this.rdf = rdf;
    this.rf = rf;
    this.rules = rules;
    this.oldRdfMap = oldRdfMap;
    this.newRdfMap = null;
    this.rdfMin = rdfMin;
    this.rdfMax = rdfMax;
    this.targetBalance = targetBalance;
    this.rackDiversity = rackDiversity;
    this.trackCapacity = trackCapacity;
    this.migrationMap = migrationMap;
  }

  /** Convenience constructor with no migration restrictions. */
  public StableRdfMapping(int rdf, int rf, PlacementRules rules,
      Map<String, List<String>> oldRdfMap, int rdfMin, int rdfMax,
      double targetBalance, int rackDiversity, boolean trackCapacity) {
    this(rdf, rf, rules, oldRdfMap, rdfMin, rdfMax, targetBalance,
        rackDiversity, trackCapacity, null);
  }

  /** Converts the computed Node-based RDF map into a name-based map. */
  public Map<String, List<String>> getNewRdfMap() {
    Map<String, List<String>> rdfMap = new HashMap<String, List<String>>();
    for (Map.Entry<Node, List<Node>> entry: newRdfMap.entrySet()) {
      List<String> nodeList = new ArrayList<String>();
      for (Node node: entry.getValue()) {
        nodeList.add(node.getName());
      }
      rdfMap.put(entry.getKey().getName(), nodeList);
    }
    return rdfMap;
  }

  // Computes each node's share of the total weight and seeds the usage map
  // from the existing mapping.  No-op unless capacity tracking is enabled.
  private void initializeCapcity(List<Node> allNodes, Map<Node, List<Node>> mapping) {
    if (!this.trackCapacity) return;
    // Calculate total weight
    double totalWeight = 0;
    for (Node node: allNodes) {
      totalWeight += node.getWeight();
    }
    // Calculate replica capacity and initialize replica usage
    for (Node node: allNodes) {
      this.replicaCapacity.put(node.getName(), node.getWeight() / totalWeight);
      this.replicaUsage.put(node.getName(), new HashMap<String, Double>());
    }
    for (Node node: mapping.keySet()) {
      updateReplicaUsage(mapping, node);
    }
  }

  // Recomputes, for every replica of `primary`, the weight-proportional share
  // of the primary's capacity that the replica carries.
  private void updateReplicaUsage(Map<Node, List<Node>> mapping, Node primary) {
    if (!this.trackCapacity) return;
    final List<Node> replicas = mapping.get(primary);
    String primaryName = primary.getName();
    // Nodes < rdfMin will be removed at the end
    if (replicas.size() < this.rdfMin) return;
    double totalReplicasWeight = 0;
    for (Node replica: replicas) {
      totalReplicasWeight += replica.getWeight();
    }
    for (Node replica: replicas) {
      String replicaName = replica.getName();
      Map<String, Double> usage = this.replicaUsage.get(replicaName);
      usage.put(primaryName,
          this.replicaCapacity.get(primaryName) * replica.getWeight() / totalReplicasWeight);
    }
  }

  // Sums the usage shares attributed to `replica` across all primaries.
  private double getReplicaUsage(Node replica) {
    if (!this.trackCapacity) return 0;
    Map<String, Double> usage = replicaUsage.get(replica.getName());
    double totalUsage = 0;
    for (String node: usage.keySet()) {
      totalUsage += usage.get(node);
    }
    return totalUsage;
  }

  // True while the node's usage is strictly below its capacity share.
  private boolean hasCapacity(Node node) {
    if (!this.trackCapacity) return true;
    return this.replicaCapacity.get(node.getName()) > getReplicaUsage(node);
  }

  // True when adding `node` next to `existingNodes` would exceed the allowed
  // number of placement-rule conflicts (rackDiversity).
  private boolean hasConflict(Node node, List<Node> existingNodes) {
    int conflicts = 0;
    for (Node replica: existingNodes) {
      if (!rules.acceptReplica(replica, node)) {
        conflicts ++;
      }
    }
    return (conflicts >= this.rackDiversity);
  }

  // Picks a pairing candidate for `ownerNode`, scanning nodes in ascending
  // replica-count order.  Mutates candidateMap as a side effect: candidates
  // at rdfMax or in conflict are pruned.  A candidate allowed by migrationMap
  // is preferred; otherwise the first acceptable one found is remembered.
  private Node findCandidate(Node ownerNode, Map<Node, List<Node>> candidateMap,
      Multimap<Integer, Node> nodeReplicaSizeMap, Map<Node, List<Node>> rdfMap) {
    Node candidate = null;
    List<Node> candidates = candidateMap.get(ownerNode);
    if (candidates == null) return null;
    // Find candidate
    for (Map.Entry<Integer, Node> entry: nodeReplicaSizeMap.entries()) {
      Node node = entry.getValue();
      int replicaSize = entry.getKey();
      if(!candidates.contains(node)) continue;
      if (replicaSize >= this.rdfMax) {
        // Remove nodes with RDF >= rdfMax
        candidates.remove(node);
      } else if (!hasCapacity(node)) {
        continue;
      } else if (hasConflict(node, rdfMap.get(ownerNode))
          || hasConflict(ownerNode, rdfMap.get(node))) {
        // Conflict is symmetric: prune the pairing from both sides.
        candidates.remove(node);
        candidateMap.get(node).remove(ownerNode);
        continue;
      } else {
        if (migrationMap == null || !migrationMap.containsKey(ownerNode.getName())
            || migrationMap.get(ownerNode.getName()).contains(node.getName())) {
          candidate = node;
          break;
        } else {
          // Not migration-preferred: keep as fallback, keep scanning.
          if (candidate == null) {
            candidate = node;
            continue;
          }
        }
      }
    }
    return candidate;
  }

  // Greedily pairs up nodes (symmetrically adding each to the other's replica
  // list) until every node reaches rdfMin or no candidate can be found.
  private void buildRDFMapping(Node datacenter, Map<Node, List<Node>> mapping)
      throws MappingException {
    final List<Node> allNodes = datacenter.getAllLeafNodes();
    initializeCapcity(allNodes, mapping);
    // Generate candidates
    Map<Node, List<Node>> candidateMap = new TreeMap<Node, List<Node>>();
    for (Node node: mapping.keySet()) {
      for (Node candidate: mapping.keySet()) {
        if (mapping.get(candidate).size() >= this.rdfMax) continue;
        if (candidate != node && rules.acceptReplica(node, candidate)
            && !mapping.get(node).contains(candidate)) {
          List<Node> nodeList = candidateMap.get(node);
          if (nodeList == null) {
            nodeList = new ArrayList<Node>();
            candidateMap.put(node, nodeList);
          }
          nodeList.add(candidate);
        }
      }
    }
    // Multimap sorted by replica count, so iteration visits least-replicated
    // nodes first.
    TreeMultimap<Integer, Node> nodeReplicaSizeMap = TreeMultimap.create();
    for (Node node: mapping.keySet()) {
      nodeReplicaSizeMap.put(mapping.get(node).size(), node);
    }
    while(true) {
      // Pick node with least number of replicas
      Node candidate = null;
      Node minNode = null;
      int min = this.rdfMax;
      for (Map.Entry<Integer, Node> entry: nodeReplicaSizeMap.entries()) {
        int replicaSize = entry.getKey();
        Node node = entry.getValue();
        // break at this point since it sorted
        if (replicaSize >= this.rdfMax) break;
        Node findResult = findCandidate(node, candidateMap, nodeReplicaSizeMap, mapping);
        if (findResult != null) {
          candidate = findResult;
          minNode = node;
          min = replicaSize;
          break;
        }
      }
      if (minNode == null || mapping.get(minNode).size() >= this.rdfMin) break;
      // Pair the candidate up
      mapping.get(candidate).add(minNode);
      mapping.get(minNode).add(candidate);
      candidateMap.get(minNode).remove(candidate);
      candidateMap.get(candidate).remove(minNode);
      // Re-bucket both nodes under their new replica counts.
      nodeReplicaSizeMap.remove(min, minNode);
      nodeReplicaSizeMap.put(min + 1, minNode);
      int candidateSize = mapping.get(candidate).size();
      nodeReplicaSizeMap.remove(candidateSize - 1, candidate);
      nodeReplicaSizeMap.put(candidateSize, candidate);
      updateReplicaUsage(mapping, minNode);
      logger.info("Added {} for {}", candidate.getName(), minNode.getName());
    }
  }

  // Builds a per-datacenter RDF map, starting from the old map with dead or
  // zero-weight nodes removed, then adding new nodes and re-linking nodes
  // that had previously been dropped for being under rdfMin.
  private Map<Node,List<Node>> optimizeRDFMapping(Node topology) throws MappingException {
    Map<Node, List<Node>> mapping = new TreeMap<Node, List<Node>>();
    for(Node datacenter : topology.findChildren(Types.DATA_CENTER)) {
      final List<Node> allNodes = datacenter.getAllLeafNodes();
      Map<Node, List<Node>> dcMapping = new TreeMap<Node, List<Node>>();
      // Remove dead nodes
      for(String nodeName: this.oldRdfMap.keySet()) {
        // Node equality appears to be name-based: a fresh Node with only the
        // name set is used to look up the real topology node.
        Node node = new Node();
        node.setName(nodeName);
        if (allNodes.contains(node)) {
          node = allNodes.get(allNodes.indexOf(node));
          if (node.isFailed() || node.getWeight() <= 0) continue;
          List<Node> replicas = new ArrayList<Node>();
          for (String replicaName: this.oldRdfMap.get(nodeName)) {
            Node replica = new Node();
            replica.setName(replicaName);
            if (allNodes.contains(replica)) {
              replica = allNodes.get(allNodes.indexOf(replica));
              if (replica.isFailed() || replica.getWeight() <= 0) continue;
              replicas.add(replica);
            }
          }
          dcMapping.put(node, replicas);
        }
      }
      // Add new nodes
      for(Node node: allNodes) {
        if (node.isFailed() || node.getWeight() <= 0) continue;
        if (!dcMapping.containsKey(node)) {
          dcMapping.put(node, new ArrayList<Node>());
        }
      }
      // Add removed nodes (<rdfMin) back
      for (Node node: dcMapping.keySet()) {
        for (Node replica : dcMapping.get(node)) {
          if (!this.oldRdfMap.containsKey(replica.getName())) {
            dcMapping.get(replica).add(node);
          }
        }
      }
      buildRDFMapping(datacenter, dcMapping);
      mapping.putAll(dcMapping);
    }
    return mapping;
  }

  // Delegates the data -> node placement to RDFCRUSHMapping using the freshly
  // computed RDF map and the configured target balance.
  private Map<Long,List<Node>> optimizeTargetBalance(List<Long> data, Node crunched) {
    Map<Long,List<Node>> map = null;
    RDFCRUSHMapping rdfMapping = new RDFCRUSHMapping(this.rf, this.rules, this.targetBalance);
    map = rdfMapping.createMapping(data, crunched, this.newRdfMap);
    logger.info(String.format("created mapping with target balance %.2f", this.targetBalance));
    return map;
  }

  // Drops nodes that ended up with fewer than rdfMin replicas, and prunes
  // their now-empty ancestor chain from the crunched topology.
  private void removeNodes(Node crunched) {
    // Remove nodes with RDF < minRDF
    List<Node> toBeRemoved = new ArrayList<Node>();
    for (Node node : this.newRdfMap.keySet()) {
      if (this.newRdfMap.get(node).size() < this.rdfMin) toBeRemoved.add(node);
    }
    for (Node node: toBeRemoved) {
      this.newRdfMap.remove(node);
      // Walk up while each ancestor has only this single child, then detach
      // the topmost such subtree.
      Node parent = node.getParent();
      Node child = node;
      while (parent != null && parent.getChildren().size() == 1) {
        child = parent;
        parent = parent.getParent();
      }
      if (parent != null) parent.getChildren().remove(child);
    }
  }

  /**
   * Computes the full data mapping: crunch the topology, derive the stable
   * RDF map, drop under-replicated nodes, then place the data.
   */
  public Map<Long,List<Node>> computeMapping(List<Long> data, Node topology) {
    Node crunched = cruncher.makeCrunch(topology);
    long begin = System.nanoTime();
    this.newRdfMap = optimizeRDFMapping(crunched);
    long end = System.nanoTime();
    logger.info("time taken to create the RDF mapping: {} ms", (end - begin)/1000000L);
    removeNodes(crunched);
    crunched = cruncher.makeCrunch(crunched);
    begin = System.nanoTime();
    Map<Long,List<Node>> map = optimizeTargetBalance(data, crunched);
    end = System.nanoTime();
    logger.info("time taken to create mapping: {} ms", (end - begin)/1000000L);
    return map;
  }
}
/**
 * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.smackx.bytestreams.socks5;

import static org.junit.Assert.*;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;

import org.jivesoftware.smack.Connection;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.XMPPError;
import org.jivesoftware.smackx.bytestreams.socks5.Socks5BytestreamManager;
import org.jivesoftware.smackx.bytestreams.socks5.Socks5BytestreamRequest;
import org.jivesoftware.smackx.bytestreams.socks5.Socks5Utils;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream;
import org.jivesoftware.util.ConnectionUtils;
import org.jivesoftware.util.Protocol;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for the Socks5BytestreamRequest class.
 *
 * @author Henning Staib
 */
public class Socks5ByteStreamRequestTest {

    // settings
    String initiatorJID = "initiator@xmpp-server/Smack";
    String targetJID = "target@xmpp-server/Smack";
    String xmppServer = "xmpp-server";
    String proxyJID = "proxy.xmpp-server";
    String proxyAddress = "127.0.0.1";
    String sessionID = "session_id";

    Protocol protocol;

    Connection connection;

    /**
     * Initialize fields used in the tests.
     */
    @Before
    public void setup() {

        // build protocol verifier
        protocol = new Protocol();

        // create mocked XMPP connection
        connection = ConnectionUtils.createMockedConnection(protocol, targetJID, xmppServer);

    }

    /**
     * Accepting a SOCKS5 Bytestream request should fail if the request doesn't contain any Socks5
     * proxies.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldFailIfRequestHasNoStreamHosts() throws Exception {

        try {

            // build SOCKS5 Bytestream initialization request with no SOCKS5 proxies
            Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                            initiatorJID, targetJID, sessionID);

            // get SOCKS5 Bytestream manager for connection
            Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

            // build SOCKS5 Bytestream request with the bytestream initialization
            Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(
                            byteStreamManager, bytestreamInitialization);

            // accept the stream (this is the call that is tested here)
            byteStreamRequest.accept();

            fail("exception should be thrown");
        }
        catch (XMPPException e) {
            assertTrue(e.getMessage().contains("Could not establish socket with any provided host"));
        }

        // verify targets response: an item-not-found error IQ back to the initiator
        assertEquals(1, protocol.getRequests().size());
        Packet targetResponse = protocol.getRequests().remove(0);
        assertTrue(IQ.class.isInstance(targetResponse));
        assertEquals(initiatorJID, targetResponse.getTo());
        assertEquals(IQ.Type.ERROR, ((IQ) targetResponse).getType());
        assertEquals(XMPPError.Condition.item_not_found.toString(),
                        ((IQ) targetResponse).getError().getCondition());

    }

    /**
     * Accepting a SOCKS5 Bytestream request should fail if target is not able to connect to any of
     * the provided SOCKS5 proxies.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldFailIfRequestHasInvalidStreamHosts() throws Exception {

        try {

            // build SOCKS5 Bytestream initialization request
            Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                            initiatorJID, targetJID, sessionID);
            // add proxy that is not running
            bytestreamInitialization.addStreamHost(proxyJID, proxyAddress, 7778);

            // get SOCKS5 Bytestream manager for connection
            Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

            // build SOCKS5 Bytestream request with the bytestream initialization
            Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(
                            byteStreamManager, bytestreamInitialization);

            // accept the stream (this is the call that is tested here)
            byteStreamRequest.accept();

            fail("exception should be thrown");
        }
        catch (XMPPException e) {
            assertTrue(e.getMessage().contains("Could not establish socket with any provided host"));
        }

        // verify targets response: an item-not-found error IQ back to the initiator
        assertEquals(1, protocol.getRequests().size());
        Packet targetResponse = protocol.getRequests().remove(0);
        assertTrue(IQ.class.isInstance(targetResponse));
        assertEquals(initiatorJID, targetResponse.getTo());
        assertEquals(IQ.Type.ERROR, ((IQ) targetResponse).getType());
        assertEquals(XMPPError.Condition.item_not_found.toString(),
                        ((IQ) targetResponse).getError().getCondition());

    }

    /**
     * Target should not try to connect to SOCKS5 proxies that already failed twice.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldBlacklistInvalidProxyAfter2Failures() throws Exception {

        // build SOCKS5 Bytestream initialization request
        Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                        initiatorJID, targetJID, sessionID);
        bytestreamInitialization.addStreamHost("invalid." + proxyJID, "127.0.0.2", 7778);

        // get SOCKS5 Bytestream manager for connection
        Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

        // try to connect several times
        for (int i = 0; i < 2; i++) {
            try {
                // build SOCKS5 Bytestream request with the bytestream initialization
                Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(
                                byteStreamManager, bytestreamInitialization);

                // set timeouts
                byteStreamRequest.setTotalConnectTimeout(600);
                byteStreamRequest.setMinimumConnectTimeout(300);

                // accept the stream (this is the call that is tested here)
                byteStreamRequest.accept();

                fail("exception should be thrown");
            }
            catch (XMPPException e) {
                assertTrue(e.getMessage().contains(
                                "Could not establish socket with any provided host"));
            }

            // verify targets response
            assertEquals(1, protocol.getRequests().size());
            Packet targetResponse = protocol.getRequests().remove(0);
            assertTrue(IQ.class.isInstance(targetResponse));
            assertEquals(initiatorJID, targetResponse.getTo());
            assertEquals(IQ.Type.ERROR, ((IQ) targetResponse).getType());
            assertEquals(XMPPError.Condition.item_not_found.toString(),
                            ((IQ) targetResponse).getError().getCondition());
        }

        // create test data for stream
        byte[] data = new byte[] { 1, 2, 3 };
        Socks5TestProxy socks5Proxy = Socks5TestProxy.getProxy(7779);
        assertTrue(socks5Proxy.isRunning());

        // add a valid SOCKS5 proxy; the invalid one is now blacklisted, so the
        // short timeouts leave enough time to reach this proxy
        bytestreamInitialization.addStreamHost(proxyJID, proxyAddress, 7779);

        // build SOCKS5 Bytestream request with the bytestream initialization
        Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(byteStreamManager,
                        bytestreamInitialization);

        // set timeouts
        byteStreamRequest.setTotalConnectTimeout(600);
        byteStreamRequest.setMinimumConnectTimeout(300);

        // accept the stream (this is the call that is tested here)
        InputStream inputStream = byteStreamRequest.accept().getInputStream();

        // create digest to get the socket opened by target
        String digest = Socks5Utils.createDigest(sessionID, initiatorJID, targetJID);

        // test stream by sending some data
        OutputStream outputStream = socks5Proxy.getSocket(digest).getOutputStream();
        outputStream.write(data);

        // verify that data is transferred correctly
        // NOTE(review): a single read() is assumed to return all 3 bytes —
        // acceptable for a loopback test stream
        byte[] result = new byte[3];
        inputStream.read(result);
        assertArrayEquals(data, result);

        // verify targets response
        assertEquals(1, protocol.getRequests().size());
        Packet targetResponse = protocol.getRequests().remove(0);
        assertEquals(Bytestream.class, targetResponse.getClass());
        assertEquals(initiatorJID, targetResponse.getTo());
        assertEquals(IQ.Type.RESULT, ((Bytestream) targetResponse).getType());
        assertEquals(proxyJID, ((Bytestream) targetResponse).getUsedHost().getJID());

    }

    /**
     * Target should not blacklist any SOCKS5 proxies regardless of failing connections.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldNotBlacklistInvalidProxy() throws Exception {

        // disable blacklisting
        Socks5BytestreamRequest.setConnectFailureThreshold(0);

        // build SOCKS5 Bytestream initialization request
        Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                        initiatorJID, targetJID, sessionID);
        bytestreamInitialization.addStreamHost("invalid." + proxyJID, "127.0.0.2", 7778);

        // get SOCKS5 Bytestream manager for connection
        Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

        // try to connect several times
        for (int i = 0; i < 10; i++) {
            try {
                // build SOCKS5 Bytestream request with the bytestream initialization
                Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(
                                byteStreamManager, bytestreamInitialization);

                // set timeouts
                byteStreamRequest.setTotalConnectTimeout(600);
                byteStreamRequest.setMinimumConnectTimeout(300);

                // accept the stream (this is the call that is tested here)
                byteStreamRequest.accept();

                fail("exception should be thrown");
            }
            catch (XMPPException e) {
                assertTrue(e.getMessage().contains(
                                "Could not establish socket with any provided host"));
            }

            // verify targets response
            assertEquals(1, protocol.getRequests().size());
            Packet targetResponse = protocol.getRequests().remove(0);
            assertTrue(IQ.class.isInstance(targetResponse));
            assertEquals(initiatorJID, targetResponse.getTo());
            assertEquals(IQ.Type.ERROR, ((IQ) targetResponse).getType());
            assertEquals(XMPPError.Condition.item_not_found.toString(),
                            ((IQ) targetResponse).getError().getCondition());
        }

        // re-enable blacklisting (restore the default for subsequent tests)
        Socks5BytestreamRequest.setConnectFailureThreshold(2);

    }

    /**
     * If the SOCKS5 Bytestream request contains multiple SOCKS5 proxies and the first one doesn't
     * respond, the connection attempt to this proxy should not consume the whole timeout for
     * connecting to the proxies.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldNotTimeoutIfFirstSocks5ProxyDoesNotRespond() throws Exception {

        // start a local SOCKS5 proxy
        Socks5TestProxy socks5Proxy = Socks5TestProxy.getProxy(7778);

        // create a fake SOCKS5 proxy that doesn't respond to a request
        ServerSocket serverSocket = new ServerSocket(7779);

        // build SOCKS5 Bytestream initialization request
        Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                        initiatorJID, targetJID, sessionID);
        // the unresponsive proxy is listed first on purpose
        bytestreamInitialization.addStreamHost(proxyJID, proxyAddress, 7779);
        bytestreamInitialization.addStreamHost(proxyJID, proxyAddress, 7778);

        // create test data for stream
        byte[] data = new byte[] { 1, 2, 3 };

        // get SOCKS5 Bytestream manager for connection
        Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

        // build SOCKS5 Bytestream request with the bytestream initialization
        Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(byteStreamManager,
                        bytestreamInitialization);

        // set timeouts
        byteStreamRequest.setTotalConnectTimeout(2000);
        byteStreamRequest.setMinimumConnectTimeout(1000);

        // accept the stream (this is the call that is tested here)
        InputStream inputStream = byteStreamRequest.accept().getInputStream();

        // assert that client tries to connect to dumb SOCKS5 proxy
        Socket socket = serverSocket.accept();
        assertNotNull(socket);

        // create digest to get the socket opened by target
        String digest = Socks5Utils.createDigest(sessionID, initiatorJID, targetJID);

        // test stream by sending some data
        OutputStream outputStream = socks5Proxy.getSocket(digest).getOutputStream();
        outputStream.write(data);

        // verify that data is transferred correctly
        byte[] result = new byte[3];
        inputStream.read(result);
        assertArrayEquals(data, result);

        // verify targets response
        assertEquals(1, protocol.getRequests().size());
        Packet targetResponse = protocol.getRequests().remove(0);
        assertEquals(Bytestream.class, targetResponse.getClass());
        assertEquals(initiatorJID, targetResponse.getTo());
        assertEquals(IQ.Type.RESULT, ((Bytestream) targetResponse).getType());
        assertEquals(proxyJID, ((Bytestream) targetResponse).getUsedHost().getJID());

        serverSocket.close();

    }

    /**
     * Accepting the SOCKS5 Bytestream request should succeed.
     *
     * @throws Exception should not happen
     */
    @Test
    public void shouldAcceptSocks5BytestreamRequestAndReceiveData() throws Exception {

        // start a local SOCKS5 proxy
        Socks5TestProxy socks5Proxy = Socks5TestProxy.getProxy(7778);

        // build SOCKS5 Bytestream initialization request
        Bytestream bytestreamInitialization = Socks5PacketUtils.createBytestreamInitiation(
                        initiatorJID, targetJID, sessionID);
        bytestreamInitialization.addStreamHost(proxyJID, proxyAddress, 7778);

        // create test data for stream
        byte[] data = new byte[] { 1, 2, 3 };

        // get SOCKS5 Bytestream manager for connection
        Socks5BytestreamManager byteStreamManager = Socks5BytestreamManager.getBytestreamManager(connection);

        // build SOCKS5 Bytestream request with the bytestream initialization
        Socks5BytestreamRequest byteStreamRequest = new Socks5BytestreamRequest(byteStreamManager,
                        bytestreamInitialization);

        // accept the stream (this is the call that is tested here)
        InputStream inputStream = byteStreamRequest.accept().getInputStream();

        // create digest to get the socket opened by target
        String digest = Socks5Utils.createDigest(sessionID, initiatorJID, targetJID);

        // test stream by sending some data
        OutputStream outputStream = socks5Proxy.getSocket(digest).getOutputStream();
        outputStream.write(data);

        // verify that data is transferred correctly
        byte[] result = new byte[3];
        inputStream.read(result);
        assertArrayEquals(data, result);

        // verify targets response
        assertEquals(1, protocol.getRequests().size());
        Packet targetResponse = protocol.getRequests().remove(0);
        assertEquals(Bytestream.class, targetResponse.getClass());
        assertEquals(initiatorJID, targetResponse.getTo());
        assertEquals(IQ.Type.RESULT, ((Bytestream) targetResponse).getType());
        assertEquals(proxyJID, ((Bytestream) targetResponse).getUsedHost().getJID());

    }

    /**
     * Stop eventually started local SOCKS5 test proxy.
     */
    @After
    public void cleanUp() {
        Socks5TestProxy.stopProxy();
        SmackConfiguration.setLocalSocks5ProxyEnabled(true);
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.protocol.QuotaByStorageTypeExceededException;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import org.apache.hadoop.hdfs.util.EnumCounters;
import org.apache.hadoop.security.AccessControlException;

/**
 * Quota feature for {@link INodeDirectory}.
 */
public final class DirectoryWithQuotaFeature implements INode.Feature {
  public static final long DEFAULT_NAMESPACE_QUOTA = Long.MAX_VALUE;
  public static final long DEFAULT_STORAGE_SPACE_QUOTA = HdfsConstants.QUOTA_RESET;

  // Configured limits for this directory.
  private QuotaCounts quota;
  // Current consumption counted against those limits.
  private QuotaCounts usage;

  /** Builder for {@link DirectoryWithQuotaFeature} with sensible defaults. */
  public static class Builder {
    private QuotaCounts quota;
    private QuotaCounts usage;

    public Builder() {
      // Default: unlimited namespace, "reset" storage space / type quotas;
      // usage starts at 1 namespace entry (the directory itself).
      this.quota = new QuotaCounts.Builder().nameSpace(DEFAULT_NAMESPACE_QUOTA).
          storageSpace(DEFAULT_STORAGE_SPACE_QUOTA).
          typeSpaces(DEFAULT_STORAGE_SPACE_QUOTA).build();
      this.usage = new QuotaCounts.Builder().nameSpace(1).build();
    }

    public Builder nameSpaceQuota(long nameSpaceQuota) {
      this.quota.setNameSpace(nameSpaceQuota);
      return this;
    }

    public Builder storageSpaceQuota(long spaceQuota) {
      this.quota.setStorageSpace(spaceQuota);
      return this;
    }

    public Builder typeQuotas(EnumCounters<StorageType> typeQuotas) {
      this.quota.setTypeSpaces(typeQuotas);
      return this;
    }

    public Builder typeQuota(StorageType type, long quota) {
      this.quota.setTypeSpace(type, quota);
      return this;
    }

    public DirectoryWithQuotaFeature build() {
      return new DirectoryWithQuotaFeature(this);
    }
  }

  private DirectoryWithQuotaFeature(Builder builder) {
    this.quota = builder.quota;
    this.usage = builder.usage;
  }

  /** @return the quota set or -1 if it is not set. */
  QuotaCounts getQuota() {
    // Return a defensive copy so callers cannot mutate the internal counts.
    return new QuotaCounts.Builder().quotaCount(this.quota).build();
  }

  /** Set this directory's quota
   *
   * @param nsQuota Namespace quota to be set
   * @param ssQuota Storagespace quota to be set
   * @param type Storage type of the storage space quota to be set.
   *             To set storagespace/namespace quota, type must be null.
   */
  void setQuota(long nsQuota, long ssQuota, StorageType type) {
    if (type != null) {
      this.quota.setTypeSpace(type, ssQuota);
    } else {
      setQuota(nsQuota, ssQuota);
    }
  }

  void setQuota(long nsQuota, long ssQuota) {
    this.quota.setNameSpace(nsQuota);
    this.quota.setStorageSpace(ssQuota);
  }

  void setQuota(long quota, StorageType type) {
    this.quota.setTypeSpace(type, quota);
  }

  /** Set storage type quota in a batch. (Only used by FSImage load)
   *
   * @param tsQuotas type space counts for all storage types supporting quota
   */
  void setQuota(EnumCounters<StorageType> tsQuotas) {
    this.quota.setTypeSpaces(tsQuotas);
  }

  /**
   * Add current quota usage to counts and return the updated counts
   * @param counts counts to be added with current quota usage
   * @return counts that have been added with the current quota usage
   */
  QuotaCounts AddCurrentSpaceUsage(QuotaCounts counts) {
    counts.add(this.usage);
    return counts;
  }

  // Computes the content summary for the directory and, if the tree did not
  // change mid-computation (yield count unchanged), cross-checks the cached
  // storage-space usage against the freshly computed value.
  ContentSummaryComputationContext computeContentSummary(final INodeDirectory dir,
      final ContentSummaryComputationContext summary)
      throws AccessControlException {
    final long original = summary.getCounts().getStoragespace();
    long oldYieldCount = summary.getYieldCount();
    dir.computeDirectoryContentSummary(summary, Snapshot.CURRENT_STATE_ID);
    // Check only when the content has not changed in the middle.
    if (oldYieldCount == summary.getYieldCount()) {
      checkStoragespace(dir, summary.getCounts().getStoragespace() - original);
    }
    return summary;
  }

  // Logs (but does not throw) if the cached storage-space usage disagrees
  // with the computed value; only meaningful when a space quota is set.
  private void checkStoragespace(final INodeDirectory dir, final long computed) {
    if (-1 != quota.getStorageSpace() && usage.getStorageSpace() != computed) {
      NameNode.LOG.error("BUG: Inconsistent storagespace for directory "
          + dir.getFullPathName() + ". Cached = " + usage.getStorageSpace()
          + " != Computed = " + computed);
    }
  }

  void addSpaceConsumed(final INodeDirectory dir, final QuotaCounts counts,
      boolean verify) throws QuotaExceededException {
    if (dir.isQuotaSet()) {
      // The following steps are important:
      // check quotas in this inode and all ancestors before changing counts
      // so that no change is made if there is any quota violation.
// (1) verify quota in this inode if (verify) { verifyQuota(counts); } // (2) verify quota and then add count in ancestors dir.addSpaceConsumed2Parent(counts, verify); // (3) add count in this inode addSpaceConsumed2Cache(counts); } else { dir.addSpaceConsumed2Parent(counts, verify); } } /** Update the space/namespace/type usage of the tree * * @param delta the change of the namespace/space/type usage */ public void addSpaceConsumed2Cache(QuotaCounts delta) { usage.add(delta); } /** * Sets namespace and storagespace take by the directory rooted * at this INode. This should be used carefully. It does not check * for quota violations. * * @param namespace size of the directory to be set * @param storagespace storage space take by all the nodes under this directory * @param typespaces counters of storage type usage */ void setSpaceConsumed(long namespace, long storagespace, EnumCounters<StorageType> typespaces) { usage.setNameSpace(namespace); usage.setStorageSpace(storagespace); usage.setTypeSpaces(typespaces); } void setSpaceConsumed(QuotaCounts c) { usage.setNameSpace(c.getNameSpace()); usage.setStorageSpace(c.getStorageSpace()); usage.setTypeSpaces(c.getTypeSpaces()); } /** @return the namespace and storagespace and typespace consumed. */ public QuotaCounts getSpaceConsumed() { return new QuotaCounts.Builder().quotaCount(usage).build(); } /** Verify if the namespace quota is violated after applying delta. */ private void verifyNamespaceQuota(long delta) throws NSQuotaExceededException { if (Quota.isViolated(quota.getNameSpace(), usage.getNameSpace(), delta)) { throw new NSQuotaExceededException(quota.getNameSpace(), usage.getNameSpace() + delta); } } /** Verify if the storagespace quota is violated after applying delta. 
*/ private void verifyStoragespaceQuota(long delta) throws DSQuotaExceededException { if (Quota.isViolated(quota.getStorageSpace(), usage.getStorageSpace(), delta)) { throw new DSQuotaExceededException(quota.getStorageSpace(), usage.getStorageSpace() + delta); } } private void verifyQuotaByStorageType(EnumCounters<StorageType> typeDelta) throws QuotaByStorageTypeExceededException { if (!isQuotaByStorageTypeSet()) { return; } for (StorageType t: StorageType.getTypesSupportingQuota()) { if (!isQuotaByStorageTypeSet(t)) { continue; } if (Quota.isViolated(quota.getTypeSpace(t), usage.getTypeSpace(t), typeDelta.get(t))) { throw new QuotaByStorageTypeExceededException( quota.getTypeSpace(t), usage.getTypeSpace(t) + typeDelta.get(t), t); } } } /** * @throws QuotaExceededException if namespace, storagespace or storage type * space quota is violated after applying the deltas. */ void verifyQuota(QuotaCounts counts) throws QuotaExceededException { verifyNamespaceQuota(counts.getNameSpace()); verifyStoragespaceQuota(counts.getStorageSpace()); verifyQuotaByStorageType(counts.getTypeSpaces()); } boolean isQuotaSet() { return quota.anyNsSsCountGreaterOrEqual(0) || quota.anyTypeSpaceCountGreaterOrEqual(0); } boolean isQuotaByStorageTypeSet() { return quota.anyTypeSpaceCountGreaterOrEqual(0); } boolean isQuotaByStorageTypeSet(StorageType t) { return quota.getTypeSpace(t) >= 0; } private String namespaceString() { return "namespace: " + (quota.getNameSpace() < 0? "-": usage.getNameSpace() + "/" + quota.getNameSpace()); } private String storagespaceString() { return "storagespace: " + (quota.getStorageSpace() < 0? "-": usage.getStorageSpace() + "/" + quota.getStorageSpace()); } private String typeSpaceString() { StringBuilder sb = new StringBuilder(); for (StorageType t : StorageType.getTypesSupportingQuota()) { sb.append("StorageType: " + t + (quota.getTypeSpace(t) < 0? 
"-": usage.getTypeSpace(t) + "/" + usage.getTypeSpace(t))); } return sb.toString(); } @Override public String toString() { return "Quota[" + namespaceString() + ", " + storagespaceString() + ", " + typeSpaceString() + "]"; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import org.apache.druid.guice.annotations.ExtensionPoint;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Interval;

import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Abstract base for Druid queries: holds the data source, segment spec,
 * context map, result ordering direction, and granularity shared by all
 * concrete query types.
 */
@ExtensionPoint
public abstract class BaseQuery<T extends Comparable<T>> implements Query<T>
{
  /**
   * Throws {@link QueryInterruptedException} if the current thread has been
   * interrupted. Note: {@link Thread#interrupted()} clears the interrupt flag;
   * the interruption is propagated via the thrown exception instead.
   */
  public static void checkInterrupted()
  {
    if (Thread.interrupted()) {
      throw new QueryInterruptedException(new InterruptedException());
    }
  }

  public static final String QUERYID = "queryId";

  private final DataSource dataSource;
  private final boolean descending;
  private final Map<String, Object> context;
  private final QuerySegmentSpec querySegmentSpec;
  // Lazily-computed cache of the total interval duration; see getDuration().
  // Deliberately excluded from equals()/hashCode() because it is fully derived
  // from querySegmentSpec, which is already compared.
  private volatile Duration duration;
  private final Granularity granularity;

  public BaseQuery(
      DataSource dataSource,
      QuerySegmentSpec querySegmentSpec,
      boolean descending,
      Map<String, Object> context
  )
  {
    this(dataSource, querySegmentSpec, descending, context, Granularities.ALL);
  }

  public BaseQuery(
      DataSource dataSource,
      QuerySegmentSpec querySegmentSpec,
      boolean descending,
      Map<String, Object> context,
      Granularity granularity
  )
  {
    Preconditions.checkNotNull(dataSource, "dataSource can't be null");
    Preconditions.checkNotNull(querySegmentSpec, "querySegmentSpec can't be null");
    Preconditions.checkNotNull(granularity, "Must specify a granularity");

    this.dataSource = dataSource;
    this.context = context;
    this.querySegmentSpec = querySegmentSpec;
    this.descending = descending;
    this.granularity = granularity;
  }

  @JsonProperty
  @Override
  public DataSource getDataSource()
  {
    return dataSource;
  }

  @JsonProperty
  @Override
  public boolean isDescending()
  {
    return descending;
  }

  @JsonProperty("intervals")
  public QuerySegmentSpec getQuerySegmentSpec()
  {
    return querySegmentSpec;
  }

  @Override
  public QueryRunner<T> getRunner(QuerySegmentWalker walker)
  {
    return getQuerySegmentSpecForLookUp(this).lookup(this, walker);
  }

  /**
   * Resolves the segment spec to use for segment lookup, descending through
   * nested {@link QueryDataSource} subqueries to the innermost query's spec.
   *
   * @throws IllegalStateException if a subquery is not a {@link BaseQuery}
   */
  @VisibleForTesting
  public static QuerySegmentSpec getQuerySegmentSpecForLookUp(BaseQuery<?> query)
  {
    if (query.getDataSource() instanceof QueryDataSource) {
      QueryDataSource ds = (QueryDataSource) query.getDataSource();
      Query<?> subquery = ds.getQuery();
      if (subquery instanceof BaseQuery) {
        return getQuerySegmentSpecForLookUp((BaseQuery<?>) subquery);
      }
      throw new IllegalStateException("Invalid subquery type " + subquery.getClass());
    }
    return query.getQuerySegmentSpec();
  }

  @Override
  public List<Interval> getIntervals()
  {
    return querySegmentSpec.getIntervals();
  }

  @Override
  public Duration getDuration()
  {
    // Benign race: concurrent callers may compute the same value twice, but
    // the result is deterministic and the field is volatile.
    if (duration == null) {
      Duration totalDuration = new Duration(0);
      for (Interval interval : querySegmentSpec.getIntervals()) {
        if (interval != null) {
          totalDuration = totalDuration.plus(interval.toDuration());
        }
      }
      duration = totalDuration;
    }
    return duration;
  }

  @Override
  @JsonProperty
  public Granularity getGranularity()
  {
    return granularity;
  }

  @Override
  public DateTimeZone getTimezone()
  {
    return granularity instanceof PeriodGranularity
           ? ((PeriodGranularity) granularity).getTimeZone()
           : DateTimeZone.UTC;
  }

  @Override
  @JsonProperty
  public Map<String, Object> getContext()
  {
    return context;
  }

  @Override
  @SuppressWarnings("unchecked") // heterogeneous context map; callers choose the type
  public <ContextType> ContextType getContextValue(String key)
  {
    return context == null ? null : (ContextType) context.get(key);
  }

  @Override
  public <ContextType> ContextType getContextValue(String key, ContextType defaultValue)
  {
    ContextType retVal = getContextValue(key);
    return retVal == null ? defaultValue : retVal;
  }

  @Override
  public boolean getContextBoolean(String key, boolean defaultValue)
  {
    return QueryContexts.parseBoolean(this, key, defaultValue);
  }

  /**
   * @deprecated use {@link #computeOverriddenContext(Map, Map) computeOverriddenContext(getContext(), overrides))}
   * instead. This method may be removed in the next minor or major version of Druid.
   */
  @Deprecated
  protected Map<String, Object> computeOverridenContext(final Map<String, Object> overrides)
  {
    return computeOverriddenContext(getContext(), overrides);
  }

  /**
   * Merges {@code overrides} on top of {@code context} into a new sorted map;
   * neither input is modified.
   */
  protected static Map<String, Object> computeOverriddenContext(
      final Map<String, Object> context,
      final Map<String, Object> overrides
  )
  {
    Map<String, Object> overridden = Maps.newTreeMap();
    if (context != null) {
      overridden.putAll(context);
    }
    overridden.putAll(overrides);
    return overridden;
  }

  @Override
  public Ordering<T> getResultOrdering()
  {
    Ordering<T> retVal = Ordering.natural();
    return descending ? retVal.reverse() : retVal;
  }

  @Override
  public String getId()
  {
    return (String) getContextValue(QUERYID);
  }

  @Override
  public Query withId(String id)
  {
    return withOverriddenContext(ImmutableMap.of(QUERYID, id));
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    BaseQuery<?> baseQuery = (BaseQuery<?>) o;
    // BUGFIX: the lazily-computed "duration" cache must not participate in
    // equality — otherwise two identical queries compare unequal depending on
    // whether getDuration() has been called on one of them. It is derived
    // entirely from querySegmentSpec, which is compared below.
    return descending == baseQuery.descending &&
           Objects.equals(dataSource, baseQuery.dataSource) &&
           Objects.equals(context, baseQuery.context) &&
           Objects.equals(querySegmentSpec, baseQuery.querySegmentSpec) &&
           Objects.equals(granularity, baseQuery.granularity);
  }

  @Override
  public int hashCode()
  {
    // Must stay consistent with equals(): "duration" is excluded so the hash
    // code of a query does not change after getDuration() populates the cache.
    return Objects.hash(dataSource, descending, context, querySegmentSpec, granularity);
  }
}
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2018 Fabian Prasser and contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import org.apache.commons.math3.util.Pair; import org.deidentifier.arx.ARXLattice.ARXNode; import org.deidentifier.arx.ARXPopulationModel.Region; import org.deidentifier.arx.DataHandleInternal.InterruptHandler; import org.deidentifier.arx.DataType.ARXDate; import org.deidentifier.arx.DataType.ARXDecimal; import org.deidentifier.arx.DataType.ARXInteger; import org.deidentifier.arx.DataType.DataTypeDescription; import org.deidentifier.arx.aggregates.StatisticsBuilder; import org.deidentifier.arx.certificate.elements.ElementData; import org.deidentifier.arx.io.CSVDataOutput; import org.deidentifier.arx.io.CSVSyntax; import org.deidentifier.arx.risk.RiskEstimateBuilder; import org.deidentifier.arx.risk.RiskModelHistogram; import cern.colt.Swapper; import com.carrotsearch.hppc.ObjectIntOpenHashMap; /** * This class provides access to dictionary encoded data. 
Furthermore, the data * is linked to the associated input or output data. This means that, e.g., if * the input data is sorted, the output data will be sorted accordingly. This * ensures that original tuples and their generalized counterpart will always * have the same row index, which is important for many use cases, e.g., for * graphical tools that allow to compare the original dataset to generalized * versions. * * @author Fabian Prasser * @author Florian Kohlmayer */ public abstract class DataHandle { /** The data types. */ protected DataType<?>[] columnToDataType = null; /** The data definition. */ protected DataDefinition definition = null; /** The header. */ protected String[] header = null; /** The header. */ protected ObjectIntOpenHashMap<String> headerMap = null; /** The node. */ protected ARXNode node = null; /** The current registry. */ protected DataRegistry registry = null; /** The current research subset. */ protected DataHandle subset = null; /** * Returns the name of the specified column. * * @param col The column index * @return the attribute name */ public abstract String getAttributeName(int col); /** * Returns the index of the given attribute, -1 if it is not in the header. * * @param attribute the attribute * @return the column index of */ public int getColumnIndexOf(final String attribute) { checkRegistry(); return headerMap.getOrDefault(attribute, -1); } /** * Returns the according data type. * * @param attribute the attribute * @return the data type */ public DataType<?> getDataType(final String attribute) { checkRegistry(); return definition.getDataType(attribute); } /** * Returns a date/time value from the specified cell. 
* * @param row The cell's row index * @param col The cell's column index * @return the date * @throws ParseException the parse exception */ public Date getDate(int row, int col) throws ParseException { String value = getValue(row, col); DataType<?> type = getDataType(getAttributeName(col)); if (type instanceof ARXDate) { return ((ARXDate) type).parse(value); } else { throw new ParseException("Invalid datatype: " + type.getClass().getSimpleName(), col); } } /** * Returns the data definition. * * @return the definition */ public DataDefinition getDefinition() { checkRegistry(); return definition; } /** * Returns an array containing the distinct values in the given column. * * @param column The column to process * @return the distinct values */ public final String[] getDistinctValues(int column) { return getDistinctValues(column, false, new InterruptHandler() { @Override public void checkInterrupt() { // Nothing to do } }); } /** * Returns a double value from the specified cell. * * @param row The cell's row index * @param col The cell's column index * @return the double * @throws ParseException the parse exception */ public Double getDouble(int row, int col) throws ParseException { String value = getValue(row, col); DataType<?> type = getDataType(getAttributeName(col)); if (type instanceof ARXDecimal) { return ((ARXDecimal) type).parse(value); } else if (type instanceof ARXInteger) { Long _long = ((ARXInteger) type).parse(value); return _long == null ? null : _long.doubleValue(); } else { throw new ParseException("Invalid datatype: " + type.getClass().getSimpleName(), col); } } /** * Returns a float value from the specified cell. 
* * @param row The cell's row index * @param col The cell's column index * @return the float * @throws ParseException the parse exception */ public Float getFloat(int row, int col) throws ParseException { String value = getValue(row, col); DataType<?> type = getDataType(getAttributeName(col)); if (type instanceof ARXDecimal) { Double _double = ((ARXDecimal) type).parse(value); return _double == null ? null : _double.floatValue(); } else if (type instanceof ARXInteger) { Long _long = ((ARXInteger) type).parse(value); return _long == null ? null : _long.floatValue(); } else { throw new ParseException("Invalid datatype: " + type.getClass().getSimpleName(), col); } } /** * Returns the generalization level for the attribute. * * @param attribute the attribute * @return the generalization */ public abstract int getGeneralization(String attribute); /** * Returns an int value from the specified cell. * * @param row The cell's row index * @param col The cell's column index * @return the int * @throws ParseException the parse exception */ public Integer getInt(int row, int col) throws ParseException { String value = getValue(row, col); DataType<?> type = getDataType(getAttributeName(col)); if (type instanceof ARXInteger) { Long _long = ((ARXInteger) type).parse(value); return _long == null ? null : _long.intValue(); } else { throw new ParseException("Invalid datatype: " + type.getClass().getSimpleName(), col); } } /** * Returns a long value from the specified cell. 
* * @param row The cell's row index * @param col The cell's column index * @return the long * @throws ParseException the parse exception */ public Long getLong(int row, int col) throws ParseException { String value = getValue(row, col); DataType<?> type = getDataType(getAttributeName(col)); if (type instanceof ARXInteger) { return ((ARXInteger) type).parse(value); } else { throw new ParseException("Invalid datatype: " + type.getClass().getSimpleName(), col); } } /** * Returns a mapping from data types to the relative number of values that conform to the according type. * This method uses the default locale. * This method only returns types that match at least 80% of all values in the column . * * @param column the column * @return the matching data types */ public List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column) { return getMatchingDataTypes(column, Locale.getDefault(), 0.8d); } /** * Returns a mapping from data types to the relative number of values that conform to the according type for a given wrapped class. * This method uses the default locale. * This method only returns types that match at least 80% of all values in the column . * * @param <U> the generic type * @param column the column * @param clazz The wrapped class * @return the matching data types */ public <U> List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Class<U> clazz) { return getMatchingDataTypes(column, clazz, Locale.getDefault(), 0.8d); } /** * Returns a mapping from data types to the relative number of values that conform to the according type for a given wrapped class. * This method uses the default locale. 
* * @param <U> the generic type * @param column the column * @param clazz The wrapped class * @param threshold Relative minimal number of values that must match to include a data type in the results * @return the matching data types */ public <U> List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Class<U> clazz, double threshold) { return getMatchingDataTypes(column, clazz, Locale.getDefault(), threshold); } /** * Returns a mapping from data types to the relative number of values that conform to the according type for a given wrapped class. * This method only returns types that match at least 80% of all values in the column . * * @param <U> the generic type * @param column the column * @param clazz The wrapped class * @param locale The locale to use * @return the matching data types */ public <U> List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Class<U> clazz, Locale locale) { return getMatchingDataTypes(column, clazz, locale, 0.8d); } /** * Returns a mapping from data types to the relative number of values that conform to the according type for a given wrapped class. 
* * @param <U> the generic type * @param column the column * @param clazz The wrapped class * @param locale The locale to use * @param threshold Relative minimal number of values that must match to include a data type in the results * @return the matching data types */ public <U> List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Class<U> clazz, Locale locale, double threshold) { checkRegistry(); checkColumn(column); double distinct = this.getDistinctValues(column).length; List<Pair<DataType<?>, Double>> result = new ArrayList<Pair<DataType<?>, Double>>(); DataTypeDescription<U> description = DataType.list(clazz); if (description == null) { return result; } if (description.hasFormat()) { for (String format : description.getExampleFormats()) { DataType<U> type = description.newInstance(format, locale); double matching = getNumConformingValues(column, type) / distinct; if (matching >= threshold) { result.add(new Pair<DataType<?>, Double>(type, matching)); } } } else { DataType<U> type = description.newInstance(); double matching = getNumConformingValues(column, type) / distinct; if (matching >= threshold) { result.add(new Pair<DataType<?>, Double>(type, matching)); } } return result; } /** * Returns a mapping from data types to the relative number of values that conform to the according type. * This method uses the default locale. * * @param column the column * @param threshold Relative minimal number of values that must match to include a data type in the results * @return the matching data types */ public List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, double threshold) { return getMatchingDataTypes(column, Locale.getDefault(), threshold); } /** * Returns a mapping from data types to the relative number of values that conform to the according type * This method only returns types that match at least 80% of all values in the column . 
* * @param column the column * @param locale The locale to use * @return the matching data types */ public List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Locale locale) { return getMatchingDataTypes(column, locale, 0.8d); } /** * Returns a mapping from data types to the relative number of values that conform to the according type. * * @param column the column * @param locale The locale to use * @param threshold Relative minimal number of values that must match to include a data type in the results * @return the matching data types */ public List<Pair<DataType<?>, Double>> getMatchingDataTypes(int column, Locale locale, double threshold) { checkRegistry(); checkColumn(column); List<Pair<DataType<?>, Double>> result = new ArrayList<Pair<DataType<?>, Double>>(); result.addAll(getMatchingDataTypes(column, Long.class, locale, threshold)); result.addAll(getMatchingDataTypes(column, Date.class, locale, threshold)); result.addAll(getMatchingDataTypes(column, Double.class, locale, threshold)); result.add(new Pair<DataType<?>, Double>(DataType.STRING, 1.0d)); // Sort order final Map<Class<?>, Integer> order = new HashMap<Class<?>, Integer>(); order.put(Long.class, 0); order.put(Date.class, 1); order.put(Double.class, 2); order.put(String.class, 3); // Sort Collections.sort(result, new Comparator<Pair<DataType<?>, Double>>() { public int compare(Pair<DataType<?>, Double> o1, Pair<DataType<?>, Double> o2) { // Sort by matching quality int cmp = o1.getSecond().compareTo(o2.getSecond()); if (cmp != 0) return -cmp; // Sort by order int order1 = order.get(o1.getFirst().getDescription().getWrappedClass()); int order2 = order.get(o2.getFirst().getDescription().getWrappedClass()); return Integer.compare(order1, order2); } }); return result; } /** * Returns a set of values that do not conform to the given data type. 
* * @param column The column to test * @param type The type to test * @param max The maximal number of values returned by this method * @return the non conforming values */ public String[] getNonConformingValues(int column, DataType<?> type, int max) { checkRegistry(); checkColumn(column); Set<String> result = new HashSet<String>(); for (String value : this.getDistinctValues(column)) { if (!type.isValid(value)) { result.add(value); } if (result.size() == max) { break; } } return result.toArray(new String[result.size()]); } /** * Returns the number of columns in the dataset. * * @return the num columns */ public abstract int getNumColumns(); /** * Returns the number of (distinct) values that conform to the given data type. * * @param column The column to test * @param type The type to test * @return the num conforming values */ public int getNumConformingValues(int column, DataType<?> type) { checkRegistry(); checkColumn(column); int count = 0; for (String value : this.getDistinctValues(column)) { count += type.isValid(value) ? 1 : 0; } return count; } /** * Returns the number of rows in the dataset. * * @return the num rows */ public abstract int getNumRows(); /** * Returns a risk estimator, using the US population if required * @return */ public RiskEstimateBuilder getRiskEstimator() { return getRiskEstimator(ARXPopulationModel.create(Region.USA), getDefinition().getQuasiIdentifyingAttributes()); } /** * Returns a risk estimator * @param model * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model) { return getRiskEstimator(model, getDefinition().getQuasiIdentifyingAttributes()); } /** * Returns a risk estimator * @param model * @param config * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model, ARXSolverConfiguration config) { return getRiskEstimator(model, getDefinition().getQuasiIdentifyingAttributes(), config); } /** * Returns a risk estimator for the given set of equivalence classes. 
Saves resources by re-using existing classes * @param model * @param classes * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model, RiskModelHistogram classes) { return new RiskEstimateBuilder(model, new DataHandleInternal(this), classes, getConfiguration()); } /** * Returns a risk estimator for the given set of equivalence classes. Saves resources by re-using existing classes * @param model * @param classes * @param config * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model, RiskModelHistogram classes, ARXSolverConfiguration config) { return new RiskEstimateBuilder(model, new DataHandleInternal(this), classes, config, getConfiguration()); } /** * Returns a risk estimator for the given set of quasi-identifiers * @param model * @param qis * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model, Set<String> qis) { return new RiskEstimateBuilder(model, new DataHandleInternal(this), qis, getConfiguration()); } /** * Returns a risk estimator for the given set of quasi-identifiers * @param model * @param qis * @param config * @return */ public RiskEstimateBuilder getRiskEstimator(ARXPopulationModel model, Set<String> qis, ARXSolverConfiguration config) { return new RiskEstimateBuilder(model, new DataHandleInternal(this), qis, config, getConfiguration()); } /** * Returns an object providing access to basic descriptive statistics about the data represented * by this handle. * * @return the statistics */ public abstract StatisticsBuilder getStatistics(); /** * Returns the transformation . * * @return the transformation */ public ARXNode getTransformation() { return node; } /** * Returns the value in the specified cell. * * @param row The cell's row index * @param col The cell's column index * @return the value */ public abstract String getValue(int row, int col); /** * Returns a new data handle that represents a context specific view on the dataset. 
* * @return the view */ public DataHandle getView() { checkRegistry(); if (subset == null) { return this; } else { return subset; } } /** * Has this handle been optimized with local recoding? * @return */ public boolean isOptimized() { checkRegistry(); return false; } /** * Determines whether this handle is orphaned, i.e., should not be used anymore * * @return true, if is orphaned */ public boolean isOrphaned() { return registry == null; } /** * Determines whether a given row is an outlier in the currently associated * data transformation. * * @param row the row * @return true, if is outlier */ public boolean isOutlier(int row) { checkRegistry(); return registry.isOutlier(this, row); } /** * Returns an iterator over the data. * * @return the iterator */ public abstract Iterator<String[]> iterator(); /** * Releases this handle and all associated resources. If a input handle is released all associated results are released * as well. */ public void release() { if (registry != null) { registry.release(this); } } /** * Renders this object * @return */ public ElementData render() { ElementData data = new ElementData("Data"); data.addProperty("Records", this.getNumRows()); data.addProperty("Attributes", this.getNumColumns()); return data; } /** * Replaces the original value with the replacement in the given column. Only supported by * handles for input data. 
* * @param column the column * @param original the original * @param replacement the replacement * @return Whether the original value was found */ public boolean replace(int column, String original, String replacement) { checkRegistry(); checkColumn(column); if (!getDataType(getAttributeName(column)).isValid(replacement)) { throw new IllegalArgumentException("Value does'nt match the attribute's data type"); } for (String s : getDistinctValues(column)) { if (s.equals(replacement)) { throw new IllegalArgumentException("Value is already contained in the data set"); } } return registry.replace(column, original, replacement); } /** * Writes the data to a CSV file. * * @param file the file * @throws IOException Signals that an I/O exception has occurred. */ public void save(final File file) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(file); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param file A file * @param separator The utilized separator character * @throws IOException Signals that an I/O exception has occurred. */ public void save(final File file, final char separator) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(file, separator); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param file the file * @param config the config * @throws IOException Signals that an I/O exception has occurred. */ public void save(final File file, final CSVSyntax config) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(file, config); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param out the out * @throws IOException Signals that an I/O exception has occurred. */ public void save(final OutputStream out) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(out); output.write(iterator()); } /** * Writes the data to a CSV file. 
* * @param out Output stream * @param separator The utilized separator character * @throws IOException Signals that an I/O exception has occurred. */ public void save(final OutputStream out, final char separator) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(out, separator); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param out the out * @param config the config * @throws IOException Signals that an I/O exception has occurred. */ public void save(final OutputStream out, final CSVSyntax config) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(out, config); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param path the path * @throws IOException Signals that an I/O exception has occurred. */ public void save(final String path) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(path); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param path A path * @param separator The utilized separator character * @throws IOException Signals that an I/O exception has occurred. */ public void save(final String path, final char separator) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(path, separator); output.write(iterator()); } /** * Writes the data to a CSV file. * * @param path the path * @param config the config * @throws IOException Signals that an I/O exception has occurred. */ public void save(final String path, final CSVSyntax config) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(path, config); output.write(iterator()); } /** * Sorts the dataset according to the given columns. Will sort input and * output analogously. * * @param ascending Sort ascending or descending * @param columns An integer array containing column indicides */ public void sort(boolean ascending, int... 
columns) { checkRegistry(); registry.sort(this, ascending, columns); } /** * Sorts the dataset according to the given columns and the given range. * Will sort input and output analogously. * * @param from The lower bound * @param to The upper bound * @param ascending Sort ascending or descending * @param columns An integer array containing column indicides */ public void sort(int from, int to, boolean ascending, int... columns) { checkRegistry(); registry.sort(this, from, to, ascending, columns); } /** * Sorts the dataset according to the given columns. Will sort input and * output analogously. * * @param swapper A swapper * @param ascending Sort ascending or descending * @param columns An integer array containing column indicides */ public void sort(Swapper swapper, boolean ascending, int... columns) { checkRegistry(); registry.sort(this, swapper, ascending, columns); } /** * Sorts the dataset according to the given columns and the given range. * Will sort input and output analogously. * * @param swapper A swapper * @param from The lower bound * @param to The upper bound * @param ascending Sort ascending or descending * @param columns An integer array containing column indicides */ public void sort(Swapper swapper, int from, int to, boolean ascending, int... columns) { checkRegistry(); registry.sort(this, swapper, from, to, ascending, columns); } /** * Swaps both rows. * * @param row1 the row1 * @param row2 the row2 */ public void swap(int row1, int row2) { checkRegistry(); registry.swap(this, row1, row2); } /** * Checks a column index. * * @param column1 the column1 */ protected void checkColumn(final int column1) { if ((column1 < 0) || (column1 > (header.length - 1))) { throw new IndexOutOfBoundsException("Column index out of range: " + column1 + ". Valid: 0 - " + (header.length - 1)); } } /** * Checks the column indexes. 
* * @param columns the columns */ protected void checkColumns(final int[] columns) { // Check if ((columns.length == 0) || (columns.length > header.length)) { throw new IllegalArgumentException("Invalid number of column indices"); } // Create a sorted copy of the input columns final int[] cols = new int[columns.length]; System.arraycopy(columns, 0, cols, 0, cols.length); Arrays.sort(cols); // Check for (int i = 0; i < cols.length; i++) { checkColumn(cols[i]); if ((i > 0) && (cols[i] == cols[i - 1])) { throw new IllegalArgumentException("Duplicate column index"); } } } /** * Checks whether a registry is referenced. */ protected void checkRegistry() { if (registry == null) { throw new RuntimeException("This data handle (" + this.getClass().getSimpleName() + "@" + hashCode() + ") is orphaned"); } } /** * Checks a row index. * * @param row1 the row1 * @param length the length */ protected void checkRow(final int row1, final int length) { if ((row1 < 0) || (row1 > length)) { throw new IndexOutOfBoundsException("Row index (" + row1 + ") out of range (0 <= row <= " + length + ")"); } } /** * Releases all resources. */ protected abstract void doRelease(); /** * Returns the base data type without generalization. * * @param attribute the attribute * @return the base data type */ protected DataType<?> getBaseDataType(final String attribute) { checkRegistry(); return getRegistry().getBaseDataType(attribute); } /** * Returns the ARXConfiguration that is currently being used, null if this is an input handle * @return */ protected abstract ARXConfiguration getConfiguration(); /** * Generates an array of data types. * * @return the data type array */ protected abstract DataType<?>[] getColumnToDataType(); /** * Returns the distinct values. 
* * @param column the column * @param ignoreSuppression * @param handler the handler * @return the distinct values */ protected abstract String[] getDistinctValues(int column, boolean ignoreSuppression, InterruptHandler handler); /** * Returns the registry associated with this handle. * * @return the registry */ protected DataRegistry getRegistry() { return registry; } /** * Returns the internal value identifier * @param column * @param value * @return */ protected abstract int getValueIdentifier(int column, String value); /** * A negative integer, zero, or a positive integer as the first argument is * less than, equal to, or greater than the second. It uses the specified * data types for comparison. If no datatype is specified for a specific * column it uses string comparison. * * @param row1 the row1 * @param row2 the row2 * @param columns the columns * @param ascending the ascending * @return the int */ protected int internalCompare(final int row1, final int row2, final int[] columns, final boolean ascending) { checkRegistry(); try { for (int i = 0; i < columns.length; i++) { int index = columns[i]; int cmp = columnToDataType[index].compare(internalGetValue(row1, index, false), internalGetValue(row2, index, false)); if (cmp != 0) { return ascending ? cmp : -cmp; } } return 0; } catch (final Exception e) { throw new RuntimeException(e); } } /** * Internal representation of get encoded value. Returns -1 for suppressed values. * * @param row the row * @param col the col * @return the value */ protected abstract int internalGetEncodedValue(int row, int col, boolean ignoreSuppression); /** * Internal representation of get value. * * @param row the row * @param col the col * @return the string */ protected abstract String internalGetValue(int row, int col, boolean ignoreSuppression); /** * Internal replacement method. 
* * @param column the column * @param original the original * @param replacement the replacement * @return true, if successful */ protected abstract boolean internalReplace(int column, String original, String replacement); /** * Returns whether the data represented by this handle is anonymous * @return */ protected boolean isAnonymous() { return false; } /** * Sets the current header * @param header */ protected void setHeader(String[] header) { this.header = header; this.headerMap = new ObjectIntOpenHashMap<String>(); for (int i = 0; i < header.length; i++) { headerMap.put(header[i], i); } } /** * Updates the registry. * * @param registry the new registry */ protected void setRegistry(DataRegistry registry) { this.registry = registry; } /** * Sets the subset. * * @param handle the new view */ protected void setView(DataHandle handle) { subset = handle; } /** * Returns whether this is an outlier regarding the given columns. If no columns have been * specified, <code>true</code> will be returned. * @param row * @param columns * @return */ protected abstract boolean internalIsOutlier(int row, int[] columns); }
/*
 * Copyright 2013 Tomasz Konopka.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package bamfo.call;

import bamfo.call.OneFilter.Relation;
import bamfo.utils.BamfoTool;
import bamfo.utils.NumberChecker;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import jsequtils.file.BufferedReaderMaker;
import jsequtils.file.OutputStreamMaker;
import jsequtils.variants.VcfEntry;

/**
 * A utility to filter VCF files. Supported operations are filtering by
 * inclusion to regions and filtering by key/threshold relations.
 *
 * Regions can be specified via bed files. Key/Threshold relations look at
 * columns 9 and 10 of the VCF table.
 *
 *
 * @author tkonopka
 */
public class BamfoVcfFilter extends BamfoTool implements Runnable {

    private File vcffile;
    private String outvcf = "stdout";
    private final ArrayList<OneFilter> filters;

    /**
     * Prints usage information for this tool to the configured output stream.
     */
    private void printBamVcfFilterHelp() {
        outputStream.println("bam2x filtervariants: a tool for filtering VCF files");
        outputStream.println();
        outputStream.println("General options:");
        outputStream.println(" --vcf <File> - input variant call file (VCF)");
        outputStream.println(" --output <File> - output vcf file");
        outputStream.println(" --filter <String> - name of applied filter");
        outputStream.println(" --key <String> - filter by KEY RELATION THRESHOLD");
        outputStream.println(" e.g. use \"SF>12\" for filtering for strand bias");
        outputStream.println(" --bed <File> - filter by genomic region");
        outputStream.println();
        outputStream.println("Note: when applying multiple filters in a single command, specify names");
        outputStream.println(" for key/threshold filters before names for by-region filters.");
        outputStream.println();
    }

    /**
     * Parses command line arguments and builds the list of filters to apply.
     *
     * @param args command line arguments
     * @return true if all arguments were valid and all filters could be created
     */
    @SuppressWarnings("unchecked")
    private boolean parseBam2xVcfFilterParameters(String[] args) {
        OptionParser prs = new OptionParser();

        // bam - input bam file
        prs.accepts("vcf").withRequiredArg().ofType(File.class);
        // filter - name of the filter
        prs.accepts("filter").withRequiredArg().ofType(String.class);
        // output - output directory
        prs.accepts("output").withRequiredArg().ofType(String.class);
        // bed - will accept a bed file
        prs.accepts("bed").withRequiredArg().ofType(File.class);
        // field - will accept a field name in the 9th column of the FORMAT column
        prs.accepts("key").withRequiredArg().ofType(String.class);
        // verbose - display verbose report by chromosome
        // NOTE(review): "verbose" is accepted but never queried in this class - confirm intent
        prs.accepts("verbose");

        // now use OptionSet to parse the command line
        OptionSet options;
        try {
            options = prs.parse(args);
        } catch (Exception ex) {
            outputStream.println("Error parsing command line parameters\n" + ex.getMessage());
            return false;
        }

        // extract command line paramters
        // must specify at least one of bed or key
        if (!options.has("bed") && !options.has("key")) {
            outputStream.println("Must specify one of --bed or --key");
            return false;
        }

        if (options.has("vcf")) {
            vcffile = (File) options.valueOf("vcf");
            if (!vcffile.canRead()) {
                outputStream.println("vcf file is not readable");
                return false;
            }
        } else {
            outputStream.println("missing parameter vcf");
            return false;
        }

        // if output is not set, will use stdout
        if (options.has("output")) {
            outvcf = (String) options.valueOf("output");
        }

        // from here, start parsing and creating filters that will be applied on the input
        // create array for inputs, otherwise they may not be initialized
        ArrayList<String> filternames;
        ArrayList<File> bedfiles = new ArrayList<>(4);
        ArrayList<String> keycodes = new ArrayList<>(4);

        // filter names are mandatory; one name is consumed per --bed/--key configuration
        if (options.has("filter")) {
            filternames = new ArrayList<>((List<String>) options.valuesOf("filter"));
        } else {
            outputStream.println("filter name is not specified");
            return false;
        }

        // get all the by-region
        if (options.has("bed")) {
            bedfiles = new ArrayList<>((List<File>) options.valuesOf("bed"));
            // check that all bed files can be read
            for (int i = 0; i < bedfiles.size(); i++) {
                File bedfile = bedfiles.get(i);
                if (!bedfile.canRead()) {
                    outputStream.println("bed file is not readable");
                    return false;
                }
            }
        }

        // get all the key codes
        if (options.has("key")) {
            keycodes = new ArrayList<>((List<String>) options.valuesOf("key"));
        }

        // make sure that filter names and bed/key fields match
        if (filternames.size() != bedfiles.size() + keycodes.size()) {
            outputStream.println("Number of filter names and configurations (--bed and --key) do not match");
            return false;
        }

        // if reached here, try to create all the filters.
        // the names of the filters are in a single array; keep track of position on this array
        int filterindex = 0;

        // Create all threshold filters first
        for (int i = 0; i < keycodes.size(); i++) {
            OneFilter keyFilter = makeKeyThresholdFilter(filternames.get(filterindex), keycodes.get(i));
            if (keyFilter != null) {
                filters.add(keyFilter);
            } else {
                outputStream.println("Could not understand key " + keycodes.get(i));
                return false;
            }
            filterindex++;
        }

        // then create the by-region filters
        for (int i = 0; i < bedfiles.size(); i++) {
            OneFilter bedFilter;
            try {
                bedFilter = new OneFilter(filternames.get(filterindex), bedfiles.get(i));
            } catch (IOException ex) {
                outputStream.println("Could not create bed filter");
                return false;
            }
            filters.add(bedFilter);
            filterindex++;
        }

        // if reached here, filtering is by field value
        return true;
    }

    /**
     * Creates the utility.
     *
     * @param args
     *
     * arguments like those accepted via a command line
     *
     */
    public BamfoVcfFilter(String[] args) {
        filters = new ArrayList<>(8);
        if (args == null) {
            printBamVcfFilterHelp();
            return;
        }

        // parse the parameters, exit if not successful
        if (!parseBam2xVcfFilterParameters(args)) {
            return;
        }

        bamfolog.setVerbose(true);
        // signal that setup for this task has completed correctly
        isReady = true;
    }

    /**
     * A constructor which accepts ready made filters.
     *
     * @param invcf
     *
     * @param out
     *
     * @param filters
     *
     * @param logstream
     *
     * where messages will be sent.
     *
     */
    public BamfoVcfFilter(File invcf, String out, ArrayList<OneFilter> filters, PrintStream logstream) {
        super(logstream);
        this.vcffile = invcf;
        this.outvcf = out;
        this.filters = filters;
        isReady = true;
    }

    /**
     * Parses a key/threshold code such as "SF>12" or "DP>=10" into a filter.
     *
     * @param filtername name assigned to the resulting filter
     * @param code a string of the form KEY RELATION THRESHOLD (no spaces)
     * @return the filter, or null if the code could not be understood
     */
    private OneFilter makeKeyThresholdFilter(String filtername, String code) {

        int codelen = code.length();

        // first find the first character of type ><=
        // (could perhaps be done with indexOf, but there are three possible relations)
        int relationstart = -1;
        for (int i = 0; i < codelen; i++) {
            char nowchar = code.charAt(i);
            if (nowchar == '>' || nowchar == '<' || nowchar == '=') {
                relationstart = i;
                break;
            }
        }
        // relation must leave at least one character for the key
        if (relationstart < 1) {
            return null;
        }

        // find the start of the threshold (relation may be one or two characters, e.g. ">=")
        int thresholdstart = -1;
        if (relationstart + 1 < codelen) {
            char nowchar = code.charAt(relationstart + 1);
            if (nowchar == '=') {
                thresholdstart = relationstart + 2;
            } else {
                thresholdstart = relationstart + 1;
            }
        }
        if (thresholdstart < 1 || (thresholdstart >= codelen)) {
            return null;
        }

        // get the substrings for the fieldname, relation, and threshold
        String key = code.substring(0, relationstart);
        String fieldRelationString = code.substring(relationstart, thresholdstart);
        String keyThresholdString = code.substring(thresholdstart);
        Double keyThresholdDouble = null;

        // perhaps convert the threshold into a number
        if (NumberChecker.isDouble(keyThresholdString)) {
            keyThresholdDouble = Double.valueOf(keyThresholdString);
        }

        // convert the relation string into an enum value
        Relation keyRelation;
        if (fieldRelationString.equals(">")) {
            keyRelation = Relation.Greater;
        } else if (fieldRelationString.equals(">=")) {
            keyRelation = Relation.GreaterOrEqual;
        } else if (fieldRelationString.equals("=")) {
            keyRelation = Relation.Equal;
        } else if (fieldRelationString.equals("<")) {
            keyRelation = Relation.Less;
        } else if (fieldRelationString.equals("<=")) {
            keyRelation = Relation.LessOrEqual;
        } else {
            return null;
        }

        // prefer the numeric threshold when the string parsed as a number
        if (keyThresholdDouble == null) {
            return new OneFilter(filtername, key, keyRelation, keyThresholdString);
        } else {
            return new OneFilter(filtername, key, keyRelation, keyThresholdDouble);
        }
    }

    /**
     * After the utility is initialized, it has to be "executed" by invoking
     * this method. If initialization failed, this method does not do anything.
     *
     */
    @Override
    public void run() {

        // abort if class has not been set up properly
        if (!isReady) {
            return;
        }

        // create the input/output streams
        OutputStream outstream;
        BufferedReader vcfreader;
        try {
            outstream = OutputStreamMaker.makeOutputStream(outvcf);
            vcfreader = BufferedReaderMaker.makeBufferedReader(vcffile);
        } catch (Exception ex) {
            outputStream.println("Error setting up streams");
            return;
        }

        int numfilters = filters.size();

        try {
            // copy the main header
            String header = copyHeader(vcfreader, outstream);

            // create comment lines for each applied filter
            for (int i = 0; i < numfilters; i++) {
                outstream.write(filters.get(i).getFilterHeaderLines().getBytes());
            }
            // copy the #CHROM line
            outstream.write((header + "\n").getBytes());

            // start processing the actual rows: read, filter with all filters, and write to output
            String s;
            while ((s = vcfreader.readLine()) != null && !Thread.currentThread().isInterrupted()) {
                VcfEntry entry = new VcfEntry(s);
                for (int i = 0; i < numfilters; i++) {
                    filters.get(i).filterVcfEntry(entry);
                }
                // finally, output the entry
                // NOTE(review): assumes VcfEntry.toString() includes a trailing newline - verify
                outstream.write(entry.toString().getBytes());
            }
        } catch (IOException ex) {
            outputStream.println("Error while filtering: " + ex.getMessage());
            Logger.getLogger(BamfoVcfFilter.class.getName()).log(Level.SEVERE, null, ex);
        }

        // close the input/output streams
        try {
            vcfreader.close();
            // do not close stdout; the process may still need it
            if (outstream != System.out) {
                outstream.close();
            }
        } catch (IOException ex) {
            outputStream.println("Error finalizing streams: " + ex.getMessage());
            Logger.getLogger(BamfoVcfFilter.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Starts reading the header of a vcf file. Copies the header lines from the
     * input to the output. The header line is not copied, but it is returned by
     * the function.
     *
     * @param vcfreader
     * @param outstream
     * @return
     *
     * The line from the input file which starts with "#CHROM"
     *
     * @throws IOException
     */
    private String copyHeader(BufferedReader vcfreader, OutputStream outstream) throws IOException {
        String s = null;

        // loop and copy vcf header from file to outputstream
        boolean done = false;
        while (!done) {
            s = vcfreader.readLine();
            // stop at end-of-file or at the "#CHROM" column-header line
            if (s == null || s.startsWith("#CHROM")) {
                done = true;
            } else {
                outstream.write((s + "\n").getBytes());
            }
        }

        // return the header line
        return s;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.server.initialization; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.multibindings.Multibinder; import org.apache.commons.io.IOUtils; import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.guice.Jerseys; import org.apache.druid.guice.JsonConfigProvider; import org.apache.druid.guice.LazySingleton; import org.apache.druid.guice.LifecycleModule; import org.apache.druid.guice.annotations.Self; import org.apache.druid.initialization.Initialization; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.java.util.http.client.HttpClientConfig; import org.apache.druid.java.util.http.client.HttpClientInit; import org.apache.druid.java.util.http.client.Request; import org.apache.druid.java.util.http.client.response.InputStreamResponseHandler; import org.apache.druid.java.util.http.client.response.StatusResponseHandler; import org.apache.druid.java.util.http.client.response.StatusResponseHolder; import 
org.apache.druid.metadata.PasswordProvider; import org.apache.druid.server.DruidNode; import org.apache.druid.server.initialization.jetty.JettyServerInitializer; import org.apache.druid.server.initialization.jetty.JettyServerModule; import org.apache.druid.server.initialization.jetty.ServletFilterHolder; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthorizerMapper; import org.eclipse.jetty.server.Server; import org.jboss.netty.handler.codec.http.HttpMethod; import org.joda.time.Duration; import org.junit.Assert; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import javax.servlet.DispatcherType; import javax.servlet.Filter; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.MediaType; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.EnumSet; import java.util.Locale; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; public class JettyTest extends BaseJettyTest { @Rule public TemporaryFolder folder = new TemporaryFolder(); private HttpClientConfig sslConfig; private Injector injector; private LatchedRequestStateHolder latchedRequestState; @Override protected Injector setupInjector() { TLSServerConfig tlsConfig; try { File keyStore = new File(JettyTest.class.getClassLoader().getResource("server.jks").getFile()); Path tmpKeyStore = 
Files.copy(keyStore.toPath(), new File(folder.newFolder(), "server.jks").toPath()); File trustStore = new File(JettyTest.class.getClassLoader().getResource("truststore.jks").getFile()); Path tmpTrustStore = Files.copy(trustStore.toPath(), new File(folder.newFolder(), "truststore.jks").toPath()); PasswordProvider pp = () -> "druid123"; tlsConfig = new TLSServerConfig() { @Override public String getKeyStorePath() { return tmpKeyStore.toString(); } @Override public String getKeyStoreType() { return "jks"; } @Override public PasswordProvider getKeyStorePasswordProvider() { return pp; } @Override public PasswordProvider getKeyManagerPasswordProvider() { return pp; } @Override public String getTrustStorePath() { return tmpTrustStore.toString(); } @Override public String getTrustStoreAlgorithm() { return "PKIX"; } @Override public PasswordProvider getTrustStorePasswordProvider() { return pp; } @Override public String getCertAlias() { return "druid"; } @Override public boolean isRequireClientCertificate() { return false; } @Override public boolean isRequestClientCertificate() { return false; } @Override public boolean isValidateHostnames() { return false; } }; sslConfig = HttpClientConfig.builder() .withSslContext( HttpClientInit.sslContextWithTrustedKeyStore(tmpTrustStore.toString(), pp.getPassword()) ) .withWorkerCount(1) .withReadTimeout(Duration.ZERO) .build(); } catch (IOException e) { throw new RuntimeException(e); } final int ephemeralPort = ThreadLocalRandom.current().nextInt(49152, 65535); latchedRequestState = new LatchedRequestStateHolder(); injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), ImmutableList.<Module>of( new Module() { @Override public void configure(Binder binder) { JsonConfigProvider.bindInstance( binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", false, ephemeralPort, ephemeralPort + 1, true, true) ); binder.bind(TLSServerConfig.class).toInstance(tlsConfig); 
// NOTE(review): this span continues a Guice module definition (binder setup inside an
// injector factory) that begins earlier in the file; the bindings below register the
// test server pieces: initializer, latched request state, an auth filter on "/*",
// the Jersey test resources, and server lifecycle registration.
binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class);
binder.bind(LatchedRequestStateHolder.class).toInstance(latchedRequestState);
Multibinder<ServletFilterHolder> multibinder = Multibinder.newSetBinder(
    binder,
    ServletFilterHolder.class
);
multibinder.addBinding().toInstance(
    new ServletFilterHolder()
    {
      @Override
      public String getPath()
      {
        return "/*";
      }

      @Override
      public Map<String, String> getInitParameters()
      {
        return null;
      }

      @Override
      public Class<? extends Filter> getFilterClass()
      {
        return DummyAuthFilter.class;
      }

      @Override
      public Filter getFilter()
      {
        return null;
      }

      @Override
      public EnumSet<DispatcherType> getDispatcherType()
      {
        return null;
      }
    }
);
Jerseys.addResource(binder, SlowResource.class);
Jerseys.addResource(binder, LatchedResource.class);
Jerseys.addResource(binder, ExceptionResource.class);
Jerseys.addResource(binder, DefaultResource.class);
Jerseys.addResource(binder, DirectlyReturnResource.class);
binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER);
LifecycleModule.register(binder, Server.class);
}
}
)
);
return injector;
}

@Test
@Ignore // this test will deadlock if it hits an issue, so ignored by default
public void testTimeouts() throws Exception
{
  // test for request timeouts properly not locking up all threads
  final Executor executor = Executors.newFixedThreadPool(100);
  final AtomicLong count = new AtomicLong(0);
  final CountDownLatch latch = new CountDownLatch(1000);
  for (int i = 0; i < 10000; i++) {
    executor.execute(
        new Runnable()
        {
          @Override
          public void run()
          {
            executor.execute(
                new Runnable()
                {
                  @Override
                  public void run()
                  {
                    long startTime = System.currentTimeMillis();
                    long startTime2 = 0;
                    try {
                      ListenableFuture<StatusResponseHolder> go = client.go(
                          new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/slow/hello")),
                          StatusResponseHandler.getInstance()
                      );
                      startTime2 = System.currentTimeMillis();
                      go.get();
                    }
                    catch (Exception e) {
                      e.printStackTrace();
                    }
                    finally {
                      // NOTE(review): format string has no separators between the three
                      // values, so the output runs together — confirm intended.
                      System.out.printf(
                          Locale.ENGLISH,
                          "Response time client%dtime taken for getting future%dCounter %d%n",
                          System.currentTimeMillis() - startTime,
                          System.currentTimeMillis() - startTime2,
                          count.incrementAndGet()
                      );
                      latch.countDown();
                    }
                  }
                }
            );
          }
        }
    );
  }
  latch.await();
}

@Test
public void testGzipResponseCompression() throws Exception
{
  // With "Accept-Encoding: gzip" the server must gzip the body for both GET and POST.
  final URL url = new URL("http://localhost:" + port + "/default");
  final HttpURLConnection get = (HttpURLConnection) url.openConnection();
  get.setRequestProperty("Accept-Encoding", "gzip");
  Assert.assertEquals("gzip", get.getContentEncoding());
  Assert.assertEquals(
      DEFAULT_RESPONSE_CONTENT,
      IOUtils.toString(new GZIPInputStream(get.getInputStream()), StandardCharsets.UTF_8)
  );
  final HttpURLConnection post = (HttpURLConnection) url.openConnection();
  post.setRequestProperty("Accept-Encoding", "gzip");
  post.setRequestMethod("POST");
  Assert.assertEquals("gzip", post.getContentEncoding());
  Assert.assertEquals(
      DEFAULT_RESPONSE_CONTENT,
      IOUtils.toString(new GZIPInputStream(post.getInputStream()), StandardCharsets.UTF_8)
  );
  // Without the header, the response must not be gzip-encoded.
  final HttpURLConnection getNoGzip = (HttpURLConnection) url.openConnection();
  Assert.assertNotEquals("gzip", getNoGzip.getContentEncoding());
  Assert.assertEquals(DEFAULT_RESPONSE_CONTENT, IOUtils.toString(getNoGzip.getInputStream(), StandardCharsets.UTF_8));
  final HttpURLConnection postNoGzip = (HttpURLConnection) url.openConnection();
  postNoGzip.setRequestMethod("POST");
  Assert.assertNotEquals("gzip", postNoGzip.getContentEncoding());
  Assert.assertEquals(
      DEFAULT_RESPONSE_CONTENT,
      IOUtils.toString(postNoGzip.getInputStream(), StandardCharsets.UTF_8)
  );
}

// Tests that threads are not stuck when partial chunk is not finalized
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=424107
@Test
@Ignore // above bug is not fixed in jetty for gzip encoding, and the chunk is still finalized instead of throwing exception.
public void testChunkNotFinalized() throws Exception
{
  ListenableFuture<InputStream> go = client.go(
      new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/exception/exception")),
      new InputStreamResponseHandler()
  );
  try {
    StringWriter writer = new StringWriter();
    IOUtils.copy(go.get(), writer, "utf-8");
    Assert.fail("Should have thrown Exception");
  }
  catch (IOException e) {
    // Expected.
  }
}

@Test
public void testThreadNotStuckOnException() throws Exception
{
  // If the latch never counts down (worker thread stuck), the 5s await below times out.
  final CountDownLatch latch = new CountDownLatch(1);
  Executors.newSingleThreadExecutor().execute(
      new Runnable()
      {
        @Override
        public void run()
        {
          try {
            ListenableFuture<InputStream> go = client.go(
                new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/exception/exception")),
                new InputStreamResponseHandler()
            );
            StringWriter writer = new StringWriter();
            IOUtils.copy(go.get(), writer, "utf-8");
          }
          catch (IOException e) {
            // Expected.
          }
          catch (Throwable t) {
            throw new RuntimeException(t);
          }
          latch.countDown();
        }
      }
  );
  latch.await(5, TimeUnit.SECONDS);
}

@Test
public void testExtensionAuthFilter() throws Exception
{
  // The DummyAuthFilter registered in the module must accept the secret user and
  // reject anything else with 401.
  URL url = new URL("http://localhost:" + port + "/default");
  HttpURLConnection get = (HttpURLConnection) url.openConnection();
  get.setRequestProperty(DummyAuthFilter.AUTH_HDR, DummyAuthFilter.SECRET_USER);
  Assert.assertEquals(HttpServletResponse.SC_OK, get.getResponseCode());
  get = (HttpURLConnection) url.openConnection();
  get.setRequestProperty(DummyAuthFilter.AUTH_HDR, "hacker");
  Assert.assertEquals(HttpServletResponse.SC_UNAUTHORIZED, get.getResponseCode());
}

@Test
public void testGzipRequestDecompression() throws Exception
{
  // Send a gzip-compressed request body; the echo resource must return the original text.
  String text = "hello";
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
    gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
  }
  Request request = new Request(HttpMethod.POST, new URL("http://localhost:" + port + "/return"));
  request.setHeader("Content-Encoding", "gzip");
  request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
  Assert.assertEquals(text, new String(IOUtils.toByteArray(client.go(
      request,
      new InputStreamResponseHandler()
  ).get()), Charset.defaultCharset()));
}

@Test
public void testNumConnectionsMetricHttp() throws Exception
{
  // Active-connection count must go 0 -> 1 while the latched request is in flight -> 0 after.
  String text = "hello";
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
    gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
  }
  Request request = new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/latched/hello"));
  request.setHeader("Content-Encoding", "gzip");
  request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
  JettyServerModule jsm = injector.getInstance(JettyServerModule.class);
  latchedRequestState.reset();
  waitForJettyServerModuleActiveConnectionsZero(jsm);
  Assert.assertEquals(0, jsm.getActiveConnections());
  ListenableFuture<InputStream> go = client.go(
      request,
      new InputStreamResponseHandler()
  );
  latchedRequestState.clientWaitForServerToStartRequest();
  Assert.assertEquals(1, jsm.getActiveConnections());
  latchedRequestState.clientReadyToFinishRequest();
  go.get();
  waitForJettyServerModuleActiveConnectionsZero(jsm);
  Assert.assertEquals(0, jsm.getActiveConnections());
}

@Test
public void testNumConnectionsMetricHttps() throws Exception
{
  // Same as the HTTP variant above, but over TLS with a dedicated client.
  String text = "hello";
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
    gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
  }
  Request request = new Request(HttpMethod.GET, new URL("https://localhost:" + tlsPort + "/latched/hello"));
  request.setHeader("Content-Encoding", "gzip");
  request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
  HttpClient client;
  try {
    client = HttpClientInit.createClient(
        sslConfig,
        lifecycle
    );
  }
  catch (Exception e) {
    throw new RuntimeException(e);
  }
  JettyServerModule jsm = injector.getInstance(JettyServerModule.class);
  latchedRequestState.reset();
  waitForJettyServerModuleActiveConnectionsZero(jsm);
  Assert.assertEquals(0, jsm.getActiveConnections());
  ListenableFuture<InputStream> go = client.go(
      request,
      new InputStreamResponseHandler()
  );
  latchedRequestState.clientWaitForServerToStartRequest();
  Assert.assertEquals(1, jsm.getActiveConnections());
  latchedRequestState.clientReadyToFinishRequest();
  go.get();
  waitForJettyServerModuleActiveConnectionsZero(jsm);
  Assert.assertEquals(0, jsm.getActiveConnections());
}

/**
 * Polls the module's active-connection count until it reaches zero, for up to ~5 seconds.
 *
 * @throws RuntimeException if connections are still open after the polling window.
 */
private void waitForJettyServerModuleActiveConnectionsZero(JettyServerModule jsm) throws InterruptedException
{
  // it can take a bit to close the connection, so maybe sleep for a while and hope it closes
  final int sleepTimeMills = 10;
  final int totalSleeps = 5_000 / sleepTimeMills;
  int count = 0;
  while (jsm.getActiveConnections() > 0 && count++ < totalSleeps) {
    Thread.sleep(sleepTimeMills);
  }
  if (jsm.getActiveConnections() > 0) {
    throw new RuntimeException("Connections greater than 0");
  }
}
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.devkit.util;

import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScopesCore;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.ui.components.JBList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.File;

/**
 * PSI-related helpers for the DevKit plugin: instantiability checks, simple
 * return-expression analysis, and detection of IntelliJ IDEA / plugin projects.
 *
 * @author Konstantin Bulenkov
 */
public class PsiUtil {
  private static final Key<Boolean> IDEA_PROJECT = Key.create("idea.internal.inspections.enabled");
  private static final String IDE_PROJECT_MARKER_CLASS = JBList.class.getName();
  private static final String[] IDEA_PROJECT_MARKER_FILES = {
    "idea.iml",
    "community-main.iml",
    "intellij.idea.community.main.iml",
    "intellij.idea.ultimate.main.iml"
  };

  private PsiUtil() {
  }

  /**
   * Returns {@code true} if {@code cls} can be instantiated via a public no-argument
   * constructor: a non-abstract, non-interface, public (or public static inner) class
   * with either no declared constructors or at least one public zero-parameter one.
   */
  public static boolean isInstantiable(@NotNull PsiClass cls) {
    PsiModifierList modifierList = cls.getModifierList();
    if (modifierList == null) return false;
    if (cls.isInterface() || modifierList.hasModifierProperty(PsiModifier.ABSTRACT)) return false;
    if (!isPublicOrStaticInnerClass(cls)) return false;

    PsiMethod[] constructors = cls.getConstructors();
    if (constructors.length == 0) {
      // No explicit constructors: the implicit default constructor is usable.
      return true;
    }
    for (PsiMethod constructor : constructors) {
      if (constructor.getParameterList().isEmpty() && constructor.hasModifierProperty(PsiModifier.PUBLIC)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns {@code true} for a public top-level class, or a public static inner class.
   */
  public static boolean isPublicOrStaticInnerClass(@NotNull PsiClass cls) {
    PsiModifierList modifiers = cls.getModifierList();
    if (modifiers == null || !modifiers.hasModifierProperty(PsiModifier.PUBLIC)) return false;
    // A top-level class qualifies as-is; an inner class must additionally be static.
    return cls.getParent() instanceof PsiFile || modifiers.hasModifierProperty(PsiModifier.STATIC);
  }

  /**
   * If {@code method} returns a string literal, yields that literal's text; if it returns
   * {@code getClass().getSimpleName()}, yields the name of {@code cls}; otherwise {@code null}.
   */
  @Nullable
  public static String getReturnedLiteral(PsiMethod method, PsiClass cls) {
    PsiExpression returned = getReturnedExpression(method);
    if (returned instanceof PsiLiteralExpression) {
      Object literal = ((PsiLiteralExpression)returned).getValue();
      return literal != null ? literal.toString() : null;
    }
    if (returned instanceof PsiMethodCallExpression &&
        isSimpleClassNameExpression((PsiMethodCallExpression)returned)) {
      return cls.getName();
    }
    return null;
  }

  /**
   * Returns {@code true} if the call expression is textually {@code getClass().getSimpleName()}
   * (optionally qualified with {@code this.}), ignoring any whitespace.
   */
  private static boolean isSimpleClassNameExpression(PsiMethodCallExpression expr) {
    String text = expr.getText();
    if (text == null) return false;
    // Strip all whitespace before comparing against the two accepted spellings.
    for (String ws : new String[]{" ", "\n", "\t", "\r"}) {
      text = text.replaceAll(ws, "");
    }
    return "getClass().getSimpleName()".equals(text) ||
           "this.getClass().getSimpleName()".equals(text);
  }

  /**
   * For a single-statement {@code return} method, yields the returned expression.
   * A reference to a {@code final} field is followed to the field's initializer.
   * Returns {@code null} when the body has any other shape.
   */
  @Nullable
  public static PsiExpression getReturnedExpression(PsiMethod method) {
    PsiCodeBlock body = method.getBody();
    if (body == null) return null;
    PsiStatement[] statements = body.getStatements();
    if (statements.length != 1 || !(statements[0] instanceof PsiReturnStatement)) return null;

    PsiExpression returnValue = ((PsiReturnStatement)statements[0]).getReturnValue();
    if (returnValue instanceof PsiReferenceExpression) {
      PsiElement resolved = ((PsiReferenceExpression)returnValue).resolve();
      if (resolved instanceof PsiField) {
        PsiField field = (PsiField)resolved;
        if (field.hasModifierProperty(PsiModifier.FINAL)) {
          return field.getInitializer();
        }
      }
    }
    return returnValue;
  }

  /**
   * Walks up the class hierarchy looking for a no-argument method with the given name.
   * An abstract match terminates the search with {@code null}.
   */
  @Nullable
  public static PsiMethod findNearestMethod(String name, @Nullable PsiClass cls) {
    if (cls == null) return null;
    for (PsiMethod candidate : cls.getMethods()) {
      if (candidate.getParameterList().isEmpty() && candidate.getName().equals(name)) {
        return candidate.getModifierList().hasModifierProperty(PsiModifier.ABSTRACT) ? null : candidate;
      }
    }
    return findNearestMethod(name, cls.getSuperClass());
  }

  /**
   * Returns {@code true} if {@code project} is the IntelliJ IDEA sources project.
   * The answer is computed once and cached in project user data.
   */
  public static boolean isIdeaProject(@Nullable Project project) {
    if (project == null) return false;
    Boolean cached = project.getUserData(IDEA_PROJECT);
    if (cached == null) {
      cached = checkIdeaProject(project);
      project.putUserData(IDEA_PROJECT, cached);
    }
    return cached;
  }

  /**
   * Returns {@code true} if the project has the IDE platform marker class on its
   * classpath; cached and invalidated on project-roots changes.
   */
  public static boolean isPluginProject(@NotNull final Project project) {
    return CachedValuesManager.getManager(project).getCachedValue(project, () -> {
      boolean markerClassFound =
        JavaPsiFacade.getInstance(project).findClass(IDE_PROJECT_MARKER_CLASS, GlobalSearchScope.allScope(project)) != null;
      return CachedValueProvider.Result.createSingleDependency(markerClassFound, ProjectRootManager.getInstance(project));
    });
  }

  /**
   * Module-scoped variant of {@link #isPluginProject(Project)}: looks for the marker
   * class only in the module's runtime scope.
   */
  public static boolean isPluginModule(@NotNull final Module module) {
    return CachedValuesManager.getManager(module.getProject()).getCachedValue(module, () -> {
      boolean markerClassFound = JavaPsiFacade.getInstance(module.getProject())
        .findClass(IDE_PROJECT_MARKER_CLASS, GlobalSearchScope.moduleRuntimeScope(module, false)) != null;
      return CachedValueProvider.Result.createSingleDependency(markerClassFound,
                                                               ProjectRootManager.getInstance(module.getProject()));
    });
  }

  /**
   * Returns {@code true} if the directory (or its "community"/"ultimate" child)
   * contains one of the known IDEA project marker *.iml files.
   */
  private static boolean isIntelliJBasedDir(VirtualFile baseDir) {
    if (baseDir == null) return false;
    VirtualFile[] candidates = {baseDir, baseDir.findChild("community"), baseDir.findChild("ultimate")};
    for (VirtualFile dir : candidates) {
      if (dir == null || !dir.isDirectory()) continue;
      for (String markerFile : IDEA_PROJECT_MARKER_FILES) {
        if (dir.findChild(markerFile) != null) return true;
      }
    }
    return false;
  }

  /** Forces the cached {@link #isIdeaProject(Project)} answer — tests only. */
  @TestOnly
  public static void markAsIdeaProject(@NotNull Project project, boolean value) {
    project.putUserData(IDEA_PROJECT, value);
  }

  /**
   * Uncached check backing {@link #isIdeaProject(Project)}: requires both the marker
   * *.iml layout on disk and the marker class in the production search scope.
   */
  private static boolean checkIdeaProject(@NotNull Project project) {
    if (!isIntelliJBasedDir(project.getBaseDir())) {
      return false;
    }
    GlobalSearchScope productionScope = GlobalSearchScopesCore.projectProductionScope(project);
    return JavaPsiFacade.getInstance(project).findClass(IDE_PROJECT_MARKER_CLASS, productionScope) != null;
  }

  /** Creates a smart pointer that survives PSI changes for the given element. */
  @NotNull
  public static <E extends PsiElement> SmartPsiElementPointer<E> createPointer(@NotNull E e) {
    return SmartPointerManager.getInstance(e.getProject()).createSmartPsiElementPointer(e);
  }

  /** Returns {@code true} if the element lives in a plugin.xml of a plugin project. */
  public static boolean isPluginXmlPsiElement(@NotNull PsiElement element) {
    return isPluginProject(element.getProject()) && DescriptorUtil.isPluginXml(element.getContainingFile());
  }

  /** Returns {@code true} if {@code path} contains one of the IDEA marker *.iml files. */
  public static boolean isPathToIntelliJIdeaSources(String path) {
    for (String markerFile : IDEA_PROJECT_MARKER_FILES) {
      if (new File(path, markerFile).isFile()) return true;
    }
    return false;
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.loganalytics.implementation;

import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.loganalytics.fluent.DataExportsClient;
import com.azure.resourcemanager.loganalytics.fluent.models.DataExportInner;
import com.azure.resourcemanager.loganalytics.models.DataExportListResult;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in DataExportsClient. */
// NOTE: this file is generated by AutoRest (see header); prefer regenerating over hand-editing.
public final class DataExportsClientImpl implements DataExportsClient {
    private final ClientLogger logger = new ClientLogger(DataExportsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final DataExportsService service;

    /** The service client containing this operation class. */
    private final OperationalInsightsManagementClientImpl client;

    /**
     * Initializes an instance of DataExportsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    DataExportsClientImpl(OperationalInsightsManagementClientImpl client) {
        this.service =
            RestProxy.create(DataExportsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for OperationalInsightsManagementClientDataExports to be used by the
     * proxy service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "OperationalInsightsM")
    private interface DataExportsService {
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights"
                + "/workspaces/{workspaceName}/dataExports")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<DataExportListResult>> listByWorkspace(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("workspaceName") String workspaceName,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Put(
            "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights"
                + "/workspaces/{workspaceName}/dataExports/{dataExportName}")
        @ExpectedResponses({200, 201})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<DataExportInner>> createOrUpdate(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("workspaceName") String workspaceName,
            @PathParam("dataExportName") String dataExportName,
            @QueryParam("api-version") String apiVersion,
            @BodyParam("application/json") DataExportInner parameters,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights"
                + "/workspaces/{workspaceName}/dataExports/{dataExportName}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<DataExportInner>> get(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("workspaceName") String workspaceName,
            @PathParam("dataExportName") String dataExportName,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Delete(
            "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights"
                + "/workspaces/{workspaceName}/dataExports/{dataExportName}")
        @ExpectedResponses({200, 404})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Void>> delete(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("workspaceName") String workspaceName,
            @PathParam("dataExportName") String dataExportName,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<DataExportInner>> listByWorkspaceSinglePageAsync(
        String resourceGroupName, String workspaceName) {
        // Fail fast with a descriptive error before issuing the REST call.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listByWorkspace(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            this.client.getApiVersion(),
                            accept,
                            context))
            .<PagedResponse<DataExportInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<DataExportInner>> listByWorkspaceSinglePageAsync(
        String resourceGroupName, String workspaceName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Caller-supplied context is merged with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .listByWorkspace(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                this.client.getApiVersion(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null));
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<DataExportInner> listByWorkspaceAsync(String resourceGroupName, String workspaceName) {
        // Only a first-page supplier is provided — no continuation retriever is wired up here.
        return new PagedFlux<>(() -> listByWorkspaceSinglePageAsync(resourceGroupName, workspaceName));
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<DataExportInner> listByWorkspaceAsync(
        String resourceGroupName, String workspaceName, Context context) {
        return new PagedFlux<>(() -> listByWorkspaceSinglePageAsync(resourceGroupName, workspaceName, context));
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<DataExportInner> listByWorkspace(String resourceGroupName, String workspaceName) {
        return new PagedIterable<>(listByWorkspaceAsync(resourceGroupName, workspaceName));
    }

    /**
     * Lists the data export instances within a workspace.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list data exports.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<DataExportInner> listByWorkspace(
        String resourceGroupName, String workspaceName, Context context) {
        return new PagedIterable<>(listByWorkspaceAsync(resourceGroupName, workspaceName, context));
    }

    /**
     * Create or update a data export.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param parameters The parameters required to create or update a data export.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the top level data export resource container.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<DataExportInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String workspaceName, String dataExportName, DataExportInner parameters) {
        // Validate client state and all arguments before performing the call.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (dataExportName == null) {
            return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .createOrUpdate(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            dataExportName,
                            this.client.getApiVersion(),
                            parameters,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Create or update a data export.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param parameters The parameters required to create or update a data export.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the top level data export resource container.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<DataExportInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String workspaceName, String dataExportName, DataExportInner parameters,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (dataExportName == null) {
            return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .createOrUpdate(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                dataExportName,
                this.client.getApiVersion(),
                parameters,
                accept,
                context);
    }

    /**
     * Create or update a data export.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param parameters The parameters required to create or update a data export.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the top level data export resource container.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<DataExportInner> createOrUpdateAsync(
        String resourceGroupName, String workspaceName, String dataExportName, DataExportInner parameters) {
        return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, dataExportName, parameters)
            .flatMap(
                (Response<DataExportInner> res) -> {
                    // Unwrap the body; an empty body completes the Mono without a value.
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Create or update a data export.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param parameters The parameters required to create or update a data export.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the top level data export resource container.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DataExportInner createOrUpdate(
        String resourceGroupName, String workspaceName, String dataExportName, DataExportInner parameters) {
        return createOrUpdateAsync(resourceGroupName, workspaceName, dataExportName, parameters).block();
    }

    /**
     * Create or update a data export.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param parameters The parameters required to create or update a data export.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the top level data export resource container.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<DataExportInner> createOrUpdateWithResponse(
        String resourceGroupName, String workspaceName, String dataExportName, DataExportInner parameters,
        Context context) {
        return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, dataExportName, parameters, context)
            .block();
    }

    /**
     * Gets a data export instance.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a data export instance.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<DataExportInner>> getWithResponseAsync(
        String resourceGroupName, String workspaceName, String dataExportName) {
        // Fail fast with an error signal for any missing client state or argument.
        if (this.client.getEndpoint() == null) {
            return Mono.error(
                new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono.error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono.error(
                new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (dataExportName == null) {
            return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null."));
        }
        final String acceptHeader = "application/json";
        // Capture the reactive context and merge in the client-level context before issuing the call.
        return FluxUtil
            .withContext(
                ctx ->
                    service.get(
                        this.client.getEndpoint(),
                        this.client.getSubscriptionId(),
                        resourceGroupName,
                        workspaceName,
                        dataExportName,
                        this.client.getApiVersion(),
                        acceptHeader,
                        ctx))
            .contextWrite(ctx -> ctx.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Gets a data export instance.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a data export instance.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<DataExportInner>> getWithResponseAsync(
        String resourceGroupName, String workspaceName, String dataExportName, Context context) {
        // Fail fast with an error signal for any missing client state or argument.
        if (this.client.getEndpoint() == null) {
            return Mono.error(
                new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono.error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono.error(
                new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (dataExportName == null) {
            return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null."));
        }
        final String acceptHeader = "application/json";
        // Merge the caller-supplied context with the client's default context.
        final Context mergedContext = this.client.mergeContext(context);
        return service.get(
            this.client.getEndpoint(),
            this.client.getSubscriptionId(),
            resourceGroupName,
            workspaceName,
            dataExportName,
            this.client.getApiVersion(),
            acceptHeader,
            mergedContext);
    }

    /**
     * Gets a data export instance.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a data export instance.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<DataExportInner> getAsync(String resourceGroupName, String workspaceName, String dataExportName) {
        // Unwrap the REST response body; a null body completes empty instead of emitting null.
        return getWithResponseAsync(resourceGroupName, workspaceName, dataExportName)
            .flatMap(res -> Mono.justOrEmpty(res.getValue()));
    }

    /**
     * Gets a data export instance.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a data export instance.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DataExportInner get(String resourceGroupName, String workspaceName, String dataExportName) {
        // Synchronous facade: block on the async pipeline.
        final Mono<DataExportInner> pending = getAsync(resourceGroupName, workspaceName, dataExportName);
        return pending.block();
    }

    /**
     * Gets a data export instance.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param dataExportName The data export rule name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a data export instance.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Response<DataExportInner> getWithResponse( String resourceGroupName, String workspaceName, String dataExportName, Context context) { return getWithResponseAsync(resourceGroupName, workspaceName, dataExportName, context).block(); } /** * Deletes the specified data export in a given workspace.. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param dataExportName The data export rule name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> deleteWithResponseAsync( String resourceGroupName, String workspaceName, String dataExportName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (dataExportName == null) { return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .delete( this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, workspaceName, dataExportName, this.client.getApiVersion(), 
accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Deletes the specified data export in a given workspace.. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param dataExportName The data export rule name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> deleteWithResponseAsync( String resourceGroupName, String workspaceName, String dataExportName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (dataExportName == null) { return Mono.error(new IllegalArgumentException("Parameter dataExportName is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .delete( this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, workspaceName, dataExportName, this.client.getApiVersion(), accept, context); } /** * Deletes the specified data export in a 
given workspace.. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param dataExportName The data export rule name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Void> deleteAsync(String resourceGroupName, String workspaceName, String dataExportName) { return deleteWithResponseAsync(resourceGroupName, workspaceName, dataExportName) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Deletes the specified data export in a given workspace.. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param dataExportName The data export rule name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void delete(String resourceGroupName, String workspaceName, String dataExportName) { deleteAsync(resourceGroupName, workspaceName, dataExportName).block(); } /** * Deletes the specified data export in a given workspace.. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param dataExportName The data export rule name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteWithResponse(
        String resourceGroupName, String workspaceName, String dataExportName, Context context) {
        // Synchronous facade over the context-aware async call.
        final Mono<Response<Void>> pending =
            deleteWithResponseAsync(resourceGroupName, workspaceName, dataExportName, context);
        return pending.block();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.tang.formats;

import org.apache.commons.cli.*;
import org.apache.reef.tang.Configuration;
import org.apache.reef.tang.ConfigurationBuilder;
import org.apache.reef.tang.JavaConfigurationBuilder;
import org.apache.reef.tang.Tang;
import org.apache.reef.tang.annotations.Name;
import org.apache.reef.tang.exceptions.BindException;
import org.apache.reef.tang.exceptions.NameResolutionException;
import org.apache.reef.tang.types.NamedParameterNode;
import org.apache.reef.tang.types.Node;
import org.apache.reef.tang.util.MonotonicTreeMap;
import org.apache.reef.tang.util.ReflectionUtilities;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * Parses command-line arguments into a Tang {@link ConfigurationBuilder}.
 * <p>
 * Named parameters are exposed as short-named commons-cli options; application-specific
 * options can additionally be registered with a callback via
 * {@link #addCommandLineOption(Option, CommandLineCallback)}.
 */
public final class CommandLine {

  // Application-registered options mapped to the callback invoked when the option is seen.
  final Map<Option, CommandLineCallback> applicationOptions = new HashMap<>();

  // The builder that parsed parameter values are bound into.
  private final ConfigurationBuilder conf;

  // Maps a named parameter's short name to its fully qualified class name.
  private final Map<String, String> shortNames = new MonotonicTreeMap<>();

  /** Creates a CommandLine backed by a fresh Tang ConfigurationBuilder. */
  public CommandLine() {
    this.conf = Tang.Factory.getTang().newConfigurationBuilder();
  }

  /** Creates a CommandLine that binds parsed values into the given builder. */
  public CommandLine(final ConfigurationBuilder conf) {
    this.conf = conf;
  }

  /** @return the underlying ConfigurationBuilder that parsed values are bound into. */
  public ConfigurationBuilder getBuilder() {
    return this.conf;
  }

  /**
   * Registers the short name of the named parameter whose fully qualified class name is {@code s}.
   *
   * @param s fully qualified class name of a NamedParameter.
   * @return this, for chaining.
   * @throws BindException if the class cannot be resolved, is not a NamedParameterNode,
   *     or declares no short name.
   */
  public CommandLine registerShortNameOfClass(final String s) throws BindException {
    final Node n;
    try {
      n = conf.getClassHierarchy().getNode(s);
    } catch (final NameResolutionException e) {
      throw new BindException("Problem loading class " + s, e);
    }

    if (n instanceof NamedParameterNode) {
      final NamedParameterNode<?> np = (NamedParameterNode<?>) n;
      final String shortName = np.getShortName();
      final String longName = np.getFullName();
      if (shortName == null) {
        throw new BindException(
            "Can't register non-existent short name of named parameter: " + longName);
      }
      shortNames.put(shortName, longName);
    } else {
      throw new BindException("Can't register short name for non-NamedParameterNode: " + n);
    }

    return this;
  }

  /**
   * Type-safe overload of {@link #registerShortNameOfClass(String)}.
   *
   * @param c the named parameter class to register.
   * @return this, for chaining.
   * @throws BindException if registration fails (see the String overload).
   */
  public CommandLine registerShortNameOfClass(
      final Class<? extends Name<?>> c) throws BindException {
    return registerShortNameOfClass(ReflectionUtilities.getFullName(c));
  }

  /**
   * Builds the commons-cli Options set from the registered short names plus application options.
   * NOTE(review): uses the deprecated static OptionBuilder API, hence the suppression.
   */
  @SuppressWarnings("static-access")
  private Options getCommandLineOptions() {
    final Options opts = new Options();
    for (final String shortName : shortNames.keySet()) {
      final String longName = shortNames.get(shortName);
      try {
        opts.addOption(OptionBuilder
            .withArgName(conf.classPrettyDefaultString(longName)).hasArg()
            .withDescription(conf.classPrettyDescriptionString(longName))
            .create(shortName));
      } catch (final BindException e) {
        throw new IllegalStateException(
            "Could not process " + shortName + " which is the short name of " + longName, e);
      }
    }
    for (final Option o : applicationOptions.keySet()) {
      opts.addOption(o);
    }
    return opts;
  }

  /**
   * Registers an application-specific option together with the callback invoked when it is parsed.
   *
   * @param option the commons-cli option to add.
   * @param cb the callback to invoke when the option is present on the command line.
   * @return this, for chaining.
   */
  public CommandLine addCommandLineOption(final Option option, final CommandLineCallback cb) {
    // TODO: Check for conflicting options.
    applicationOptions.put(option, cb);
    return this;
  }

  /**
   * Parses the given arguments, binding named-parameter values into the builder and invoking
   * application callbacks for registered options.
   *
   * @param args command-line arguments.
   * @param argClasses additional named parameter classes to register before parsing.
   * @return this if parsing succeeded, or null if "-?" (help) was requested.
   * @throws IOException if the command line cannot be parsed (wraps commons-cli ParseException).
   * @throws BindException if a parsed value cannot be bound to its named parameter.
   */
  // NOTE(review): the type parameter <T> is unused; removing it would be a (binary-compatible)
  // cleanup but is left untouched here.
  @SafeVarargs
  final public <T> CommandLine processCommandLine(
      final String[] args, Class<? extends Name<?>>... argClasses)
      throws IOException, BindException {

    for (final Class<? extends Name<?>> c : argClasses) {
      registerShortNameOfClass(c);
    }

    final Options o = getCommandLineOptions();
    // "-?" prints usage and aborts parsing (signalled by a null return).
    o.addOption(new Option("?", "help"));
    final Parser g = new GnuParser();

    final org.apache.commons.cli.CommandLine cl;
    try {
      cl = g.parse(o, args);
    } catch (final ParseException e) {
      throw new IOException("Could not parse config file", e);
    }

    if (cl.hasOption("?")) {
      new HelpFormatter().printHelp("reef", o);
      return null;
    }

    for (final Option option : cl.getOptions()) {

      final String shortName = option.getOpt();
      final String value = option.getValue();

      if (applicationOptions.containsKey(option)) {
        // Application option: delegate to the registered callback.
        applicationOptions.get(option).process(option);
      } else {
        try {
          // Named-parameter option: bind the value by the parameter's full name.
          conf.bind(shortNames.get(shortName), value);
        } catch (final BindException e) {
          throw new BindException("Could not bind shortName " + shortName + " to value " + value, e);
        }
      }
    }

    return this;
  }

  /**
   * Utility method to quickly parse a command line to a Configuration.
   * <p/>
   * This is equivalent to
   * <code>parseToConfigurationBuilder(args, argClasses).build()</code>
   *
   * @param args the command line parameters to parse.
   * @param argClasses the named parameters to look for.
   * @return a Configuration with the parsed parameters
   * @throws ParseException if the parsing of the commandline fails.
   */
  public static Configuration parseToConfiguration(final String[] args,
                                                   final Class<? extends Name<?>>... argClasses)
      throws ParseException {
    return parseToConfigurationBuilder(args, argClasses).build();
  }

  /**
   * Utility method to quickly parse a command line to a ConfigurationBuilder.
   * <p/>
   * This is equivalent to
   * <code>new CommandLine().processCommandLine(args, argClasses).getBuilder()</code>, but with additional checks.
   *
   * @param args the command line parameters to parse.
   * @param argClasses the named parameters to look for.
   * @return a ConfigurationBuilder with the parsed parameters
   * @throws ParseException if the parsing of the commandline fails.
   */
  public static ConfigurationBuilder parseToConfigurationBuilder(final String[] args,
                                                                 final Class<? extends Name<?>>... argClasses)
      throws ParseException {
    final CommandLine commandLine;
    try {
      commandLine = new CommandLine().processCommandLine(args, argClasses);
    } catch (final IOException e) {
      // processCommandLine() converts ParseException into IOException. This reverts that to make exception handling
      // more straight forward.
      throw new ParseException(e.getMessage());
    }

    // processCommandLine() indicates that it might return null. We need to guard users of this one from that
    if (commandLine == null) {
      throw new ParseException("Unable to parse the command line and the parser returned null.");
    } else {
      return commandLine.getBuilder();
    }
  }

  /** Callback invoked by {@link #processCommandLine} for application-registered options. */
  public interface CommandLineCallback {
    public void process(final Option option);
  }
}
package com.alyyousuf.rolle;

import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.UUID;

import android.app.Activity;
import android.app.ActionBar;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.v13.app.FragmentPagerAdapter;
import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Main (and only) activity: a two-tab pager (OpenGL visualization + PID editor) that talks to
 * the "RollE" robot over a Bluetooth RFCOMM socket with a fixed MAC address.
 * A background thread reads newline-terminated messages: "a:" lines carry 9 floats forwarded
 * to the OpenGL fragment, "PID:" lines carry 3 values shown in the PID editor.
 */
public class MainActivity extends Activity implements ActionBar.TabListener {

    // Request code used for the Bluetooth-enable system dialog (see onActivityResult).
    private static final int requestCode_blRequest = 1;

    private MainActivity thisActivity;

    Fragment[] fragmentList;
    SectionsPagerAdapter mSectionsPagerAdapter;
    ViewPager mViewPager;
    // Action-bar menu item whose icon reflects the connection state; set in onCreateOptionsMenu.
    MenuItem btIcon;

    private BluetoothAdapter btAdapter = null;
    private BluetoothDevice btDevice = null;
    private BluetoothSocket btSocket = null;
    private boolean btConnected = false;
    private InputStream btInStream = null;

    // Hard-coded robot MAC address and the standard SPP (serial port profile) UUID.
    private static final String btAddress = "20:13:06:24:02:72";
    private static final UUID btUUDID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        thisActivity = this;
        setContentView(R.layout.activity_main);
        // Set up the action bar.
        final ActionBar actionBar = getActionBar();
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

        // Create the adapter that will return a fragment for each of the two
        // primary sections of the activity.
        mSectionsPagerAdapter = new SectionsPagerAdapter(getFragmentManager());
        fragmentList = new Fragment[mSectionsPagerAdapter.getCount()];

        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(mSectionsPagerAdapter);

        // When swiping between different sections, select the corresponding
        // tab. We can also use ActionBar.Tab#select() to do this if we have
        // a reference to the Tab.
        mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                actionBar.setSelectedNavigationItem(position);
            }
        });

        // For each of the sections in the app, add a tab to the action bar.
        for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
            // Create a tab with text corresponding to the page title defined by
            // the adapter. Also specify this Activity object, which implements
            // the TabListener interface, as the callback (listener) for when
            // this tab is selected.
            actionBar.addTab(
                    actionBar.newTab()
                            .setText(mSectionsPagerAdapter.getPageTitle(i))
                            .setTabListener(this));
        }

        btAdapter = BluetoothAdapter.getDefaultAdapter();

        // Reacts to the adapter being switched off and to ACL-level disconnects by
        // tearing down our socket state.
        BroadcastReceiver mReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                final String action = intent.getAction();
                // Action State
                if (action.equals(BluetoothAdapter.ACTION_STATE_CHANGED)) {
                    final int state = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.ERROR);
                    switch (state) {
                        case BluetoothAdapter.STATE_OFF:
                            btDisconnect();
                            break;
                    }
                }
                // Connection State
                if (action.equals(BluetoothDevice.ACTION_ACL_DISCONNECTED)) {
                    if(btAdapter.isEnabled()) {
                        Toast.makeText(getApplicationContext(), "Connection lost. Please reconnect.", Toast.LENGTH_SHORT).show();
                        btDisconnect();
                    }
                }
            }
        };

        // Reader loop: accumulates characters until '\n', then dispatches the line.
        // NOTE(review): this loop busy-spins while disconnected (inner while is skipped and
        // while(true) repeats immediately), IOExceptions from read() are silently swallowed,
        // and read() == -1 (EOF) is not checked — worth revisiting.
        Runnable btReceiving = new Runnable() {
            @Override
            public void run() {
                while(true) {
                    String message = "";
                    while(btConnected && btInStream != null) {
                        try {
                            char ch = (char)btInStream.read();
                            if(ch == '\n')
                                break;
                            message += ch;
                        } catch (IOException e) {
                            //Log.e("Bluetooth", "Exception during read.", e);
                        }
                    }
                    if(message.startsWith("a:")) {
                        // "a:" message: 9 comma-separated floats forwarded to the OpenGL fragment.
                        String m[] = message.substring(2).split(",");
                        if(m.length == 9) {
                            final float n[] = {Float.parseFloat(m[0]), Float.parseFloat(m[1]), Float.parseFloat(m[2]),
                                    Float.parseFloat(m[3]), Float.parseFloat(m[4]), Float.parseFloat(m[5]),
                                    Float.parseFloat(m[6]), Float.parseFloat(m[7]), Float.parseFloat(m[8])};
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    if(paused)
                                        return;
                                    OpenGLFragment frag = (OpenGLFragment)fragmentList[0];
                                    if(frag != null) {
                                        frag.setAngle(n);
                                    }
                                }
                            });
                        }
                    }else if(message.startsWith("PID:")) {
                        // "PID:" message: 3 comma-separated values shown in the PID editor fields.
                        //Toast.makeText(getApplicationContext(), "Received something.", Toast.LENGTH_SHORT).show();
                        Log.d("PID", message);
                        final String m[] = message.substring(4).split(",");
                        if(m.length == 3) {
                            //final float n[] = {Float.parseFloat(m[0]), Float.parseFloat(m[1]), Float.parseFloat(m[2])};
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    EditText pText = (EditText) findViewById(R.id.pText);
                                    EditText iText = (EditText) findViewById(R.id.iText);
                                    EditText dText = (EditText) findViewById(R.id.dText);
                                    pText.setText(m[0]);
                                    iText.setText(m[1]);
                                    dText.setText(m[2]);
                                    Toast.makeText(getApplicationContext(), "Received current PID values from the robot.", Toast.LENGTH_SHORT).show();
                                }
                            });
                        }
                    }
                }
            }
        };
        Thread btReceivingThread = new Thread(btReceiving);
        btReceivingThread.start();

        IntentFilter filter = new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED);
        filter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECTED);
        registerReceiver(mReceiver, filter);
    }

    /** Closes the socket (best effort), resets state, and updates the action-bar icon. */
    public void btDisconnect() {
        if(!btConnected)
            return; // Prevent double calls
        try {
            if(btSocket != null)
                btSocket.close();
        } catch(Exception e) {
            // Do nothing
        }
        btSocket = null;
        btConnected = false;
        btIcon.setIcon(R.drawable.bluetooth_connect);
        Toast.makeText(getApplicationContext(), "RollE disconnected.", Toast.LENGTH_SHORT).show();
    }

    /**
     * Ensures the adapter is on.
     *
     * @return true if Bluetooth is already enabled; false if the system enable
     *     dialog was launched (connection continues in onActivityResult).
     */
    public boolean btEnable() {
        if(btAdapter.isEnabled() == false) {
            btDisconnect();
            Intent btTurnOnIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(btTurnOnIntent, requestCode_blRequest);
            return false;
        }
        return true;
    }

    /**
     * Opens the RFCOMM socket to the hard-coded device, requests the current PID values
     * ("PID?\n"), and flips the UI into the connected state.
     * NOTE(review): runs blocking I/O (connect/write) on the calling (UI) thread, and
     * btSocket may be used after a failed socket creation — confirm intended.
     */
    public void btConnect() {
        if(!btEnable())
            return;
        if(btSocket != null && btSocket.isConnected())
            return;
        btDevice = btAdapter.getRemoteDevice(btAddress);
        try {
            btSocket = btDevice.createRfcommSocketToServiceRecord(btUUDID);
        } catch(IOException e) {
            Log.e("Bluetooth", "Socket creation failed", e);
        }
        // Discovery slows down connections; always cancel it before connect().
        btAdapter.cancelDiscovery();
        try {
            btSocket.connect();
        } catch (IOException e) {
            try {
                btSocket.close();
            } catch (IOException e2) {
                Log.e("Bluetooth", "Unable to close socket during connection failure", e2);
            }
            return;
        }
        btConnected = true;
        btIcon.setIcon(R.drawable.bluetooth_connected);
        Toast.makeText(getApplicationContext(), "RollE connected.", Toast.LENGTH_SHORT).show();
        try {
            btInStream = btSocket.getInputStream();
        } catch (IOException e) {
            btSocket = null;
            Log.e("Bluetooth", "Exception in creating InputStream.", e);
        }
        // Ask the robot for its current PID values; the reply is handled by the reader thread.
        String message = "PID?\n";
        try {
            btSocket.getOutputStream().write(message.getBytes());
        } catch (IOException e) {
            btSocket = null;
            Log.e("Bluetooth", "Exception during write.", e);
        }
    }

    // When true, incoming "a:" angle updates are dropped (activity not in foreground).
    boolean paused = true;

    @Override
    public void onResume() {
        super.onResume();
        paused = false;
    }

    @Override
    public void onPause() {
        super.onPause();
        paused = true;
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Returning from the Bluetooth-enable dialog: retry the connection if the user agreed.
        if(requestCode == requestCode_blRequest) {
            if(btAdapter.isEnabled())
                btConnect();
            //Toast.makeText(getApplicationContext(), "Bluetooth turned on.", Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Sends new PID gains to the robot as "PID:p,i,d" (two decimal places each).
     * Silently does nothing when not connected.
     */
    public void setPID(float P, float I, float D) {
        if(!btConnected)
            return;
        try {
            String message = String.format("PID:%.2f,%.2f,%.2f", P, I, D);
            Log.d("Bluetooth", message);
            btSocket.getOutputStream().write(message.getBytes());
        } catch (Exception e) {
            btSocket = null;
            Log.e("Bluetooth", "Exception during write.", e);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        // Cache the connect/disconnect toggle item so its icon can be swapped later.
        btIcon = menu.getItem(0);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_bluetooth) {
            // Toggle: disconnect when connected, otherwise (re)connect.
            if(btConnected) {
                btDisconnect();
            }else{
                btConnect();
            }
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onTabSelected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // When the given tab is selected, switch to the corresponding page in
        // the ViewPager.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onTabReselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    /** Supplies the two pages: 0 = OpenGL visualization, 1 = PID editor. */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            // Cache the created fragment so the reader thread can reach it via fragmentList.
            if(position == 0)
                fragmentList[position] = new OpenGLFragment();
            else
                fragmentList[position] = new PIDFragment(thisActivity);
            return fragmentList[position];
        }

        @Override
        public int getCount() {
            // Show 2 total pages.
            return 2;
        }

        @Override
        public CharSequence getPageTitle(int position) {
            Locale l = Locale.getDefault();
            switch (position) {
                case 0:
                    return getString(R.string.title_section1).toUpperCase(l);
                case 1:
                    return getString(R.string.title_section2).toUpperCase(l);
            }
            return null;
        }
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.collections.spatial;

import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints;
import io.druid.collections.bitmap.BitmapFactory;
import io.druid.collections.bitmap.ImmutableBitmap;

import java.nio.ByteBuffer;
import java.util.Iterator;

/**
 * A read-only R-tree node decoded lazily from a {@link ByteBuffer}.
 *
 * Byte layout:
 * Header
 * 0 to 1 : the MSB is a boolean flag for isLeaf, the next 15 bits represent the number of children of a node
 * Body
 * 2 to 2 + numDims * Floats.BYTES : minCoordinates
 * 2 + numDims * Floats.BYTES to 2 + 2 * numDims * Floats.BYTES : maxCoordinates
 * concise set
 * rest (children) : Every 4 bytes is storing an offset representing the position of a child.
 *
 * The child offset is an offset from the initialOffset
 */
public class ImmutableNode
{
  public static final int HEADER_NUM_BYTES = 2;

  private final int numDims;
  // Base position of the whole serialized tree within `data`; child offsets are relative to it.
  private final int initialOffset;
  // This node's position relative to initialOffset.
  private final int offsetFromInitial;

  private final short numChildren;
  private final boolean isLeaf;
  // Absolute position where the child-offset table starts (after header, coords, and bitmap).
  private final int childrenOffset;

  private final ByteBuffer data;

  private final BitmapFactory bitmapFactory;

  /**
   * Decodes the header (leaf flag + child count) from the buffer at
   * initialOffset + offsetFromInitial, then locates the child-offset table
   * by reading the bitmap's stored byte size.
   */
  public ImmutableNode(
      int numDims,
      int initialOffset,
      int offsetFromInitial,
      ByteBuffer data,
      BitmapFactory bitmapFactory
  )
  {
    this.bitmapFactory = bitmapFactory;
    this.numDims = numDims;
    this.initialOffset = initialOffset;
    this.offsetFromInitial = offsetFromInitial;
    short header = data.getShort(initialOffset + offsetFromInitial);
    // MSB = leaf flag; low 15 bits = number of children.
    this.isLeaf = (header & 0x8000) != 0;
    this.numChildren = (short) (header & 0x7FFF);
    final int sizePosition = initialOffset + offsetFromInitial + HEADER_NUM_BYTES + 2 * numDims * Floats.BYTES;
    int bitmapSize = data.getInt(sizePosition);
    this.childrenOffset = initialOffset
                          + offsetFromInitial
                          + HEADER_NUM_BYTES
                          + 2 * numDims * Floats.BYTES
                          + Ints.BYTES
                          + bitmapSize;

    this.data = data;
  }

  /**
   * Variant that takes the leaf flag and child count directly instead of decoding
   * the header from the buffer (the bitmap size is still read to locate children).
   */
  public ImmutableNode(
      int numDims,
      int initialOffset,
      int offsetFromInitial,
      short numChildren,
      boolean leaf,
      ByteBuffer data,
      BitmapFactory bitmapFactory
  )
  {
    this.bitmapFactory = bitmapFactory;
    this.numDims = numDims;
    this.initialOffset = initialOffset;
    this.offsetFromInitial = offsetFromInitial;
    this.numChildren = numChildren;
    this.isLeaf = leaf;
    final int sizePosition = initialOffset + offsetFromInitial + HEADER_NUM_BYTES + 2 * numDims * Floats.BYTES;
    int bitmapSize = data.getInt(sizePosition);
    this.childrenOffset = initialOffset
                          + offsetFromInitial
                          + HEADER_NUM_BYTES
                          + 2 * numDims * Floats.BYTES
                          + Ints.BYTES
                          + bitmapSize;

    this.data = data;
  }

  public BitmapFactory getBitmapFactory()
  {
    return bitmapFactory;
  }

  public int getInitialOffset()
  {
    return initialOffset;
  }

  public int getOffsetFromInitial()
  {
    return offsetFromInitial;
  }

  public int getNumDims()
  {
    return numDims;
  }

  public int getNumChildren()
  {
    return numChildren;
  }

  public boolean isLeaf()
  {
    return isLeaf;
  }

  /** @return the minimum coordinate per dimension (first half of the coordinate block). */
  public float[] getMinCoordinates()
  {
    return getCoords(initialOffset + offsetFromInitial + HEADER_NUM_BYTES);
  }

  /** @return the maximum coordinate per dimension (second half of the coordinate block). */
  public float[] getMaxCoordinates()
  {
    return getCoords(initialOffset + offsetFromInitial + HEADER_NUM_BYTES + numDims * Floats.BYTES);
  }

  /**
   * Materializes this node's bitmap from the serialized bytes.
   * NOTE(review): this calls data.position(...) on the shared buffer (not a duplicate),
   * which mutates its position — not safe for concurrent readers of `data`; confirm callers
   * are single-threaded here.
   */
  public ImmutableBitmap getImmutableBitmap()
  {
    final int sizePosition = initialOffset + offsetFromInitial + HEADER_NUM_BYTES + 2 * numDims * Floats.BYTES;
    int numBytes = data.getInt(sizePosition);
    data.position(sizePosition + Ints.BYTES);
    ByteBuffer tmpBuffer = data.slice();
    tmpBuffer.limit(numBytes);
    return bitmapFactory.mapImmutableBitmap(tmpBuffer.asReadOnlyBuffer());
  }

  /**
   * Lazily iterates over the children by reading 4-byte offsets from the child table.
   * Leaf nodes yield ImmutablePoint instances, inner nodes yield ImmutableNode.
   */
  @SuppressWarnings("ArgumentParameterSwap")
  public Iterable<ImmutableNode> getChildren()
  {
    return new Iterable<ImmutableNode>()
    {
      @Override
      public Iterator<ImmutableNode> iterator()
      {
        return new Iterator<ImmutableNode>()
        {
          private int count = 0;

          @Override
          public boolean hasNext()
          {
            return (count < numChildren);
          }

          @Override
          public ImmutableNode next()
          {
            if (isLeaf) {
              return new ImmutablePoint(
                  numDims,
                  initialOffset,
                  data.getInt(childrenOffset + (count++) * Ints.BYTES),
                  data,
                  bitmapFactory
              );
            }
            return new ImmutableNode(
                numDims,
                initialOffset,
                data.getInt(childrenOffset + (count++) * Ints.BYTES),
                data,
                bitmapFactory
            );
          }

          @Override
          public void remove()
          {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
  }

  public ByteBuffer getData()
  {
    return data;
  }

  // Reads numDims floats starting at `offset` via a read-only duplicate (does not
  // disturb the shared buffer's position, unlike getImmutableBitmap above).
  private float[] getCoords(int offset)
  {
    final float[] retVal = new float[numDims];

    final ByteBuffer readOnlyBuffer = data.asReadOnlyBuffer();
    readOnlyBuffer.position(offset);
    readOnlyBuffer.asFloatBuffer().get(retVal);

    return retVal;
  }
}
package abi38_0_0.expo.modules.webbrowser;

import android.content.Context;
import android.content.Intent;
import android.content.pm.ResolveInfo;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;

import androidx.annotation.Nullable;
import androidx.browser.customtabs.CustomTabsIntent;

import android.text.TextUtils;

import abi38_0_0.org.unimodules.core.ExportedModule;
import abi38_0_0.org.unimodules.core.ModuleRegistry;
import abi38_0_0.org.unimodules.core.Promise;
import abi38_0_0.org.unimodules.core.arguments.ReadableArguments;
import abi38_0_0.org.unimodules.core.errors.CurrentActivityNotFoundException;
import abi38_0_0.org.unimodules.core.interfaces.ExpoMethod;

import java.util.ArrayList;
import java.util.List;

import abi38_0_0.expo.modules.webbrowser.error.NoPreferredPackageFound;
import abi38_0_0.expo.modules.webbrowser.error.PackageManagerNotFoundException;

/**
 * Expo native module that opens URLs in Android Custom Tabs and manages
 * Custom Tabs service connections (warm-up / cool-down / pre-fetch).
 * Results are delivered to JS through {@link Promise}s as {@link Bundle}s.
 */
public class WebBrowserModule extends ExportedModule {
  // Keys used in the argument/result bundles exchanged with the JS side.
  private final static String BROWSER_PACKAGE_KEY = "browserPackage";
  private final static String SERVICE_PACKAGE_KEY = "servicePackage";
  private final static String BROWSER_PACKAGES_KEY = "browserPackages";
  private final static String SERVICE_PACKAGES_KEY = "servicePackages";
  private final static String PREFERRED_BROWSER_PACKAGE = "preferredBrowserPackage";
  private final static String DEFAULT_BROWSER_PACKAGE = "defaultBrowserPackage";
  private final static String SHOW_IN_RECENTS = "showInRecents";
  private final static String DEFAULT_SHARE_MENU_ITEM = "enableDefaultShareMenuItem";
  private final static String TOOLBAR_COLOR_KEY = "toolbarColor";
  // Error code surfaced to JS on rejection.
  private final static String ERROR_CODE = "EXWebBrowser";
  // Module name as seen from JS.
  private static final String TAG = "ExpoWebBrowser";
  private static final String SHOW_TITLE_KEY = "showTitle";
  private static final String ENABLE_BAR_COLLAPSING_KEY = "enableBarCollapsing";
  private final static String NO_PREFERRED_PACKAGE_MSG = "Cannot determine preferred package without satisfying it.";

  // Resolves which installed packages support Custom Tabs activities/services.
  private CustomTabsActivitiesHelper mCustomTabsResolver;
  // Maintains the CustomTabsService binding used by warm-up/cool-down/mayInitWithUrl.
  private CustomTabsConnectionHelper mConnectionHelper;

  public WebBrowserModule(Context context) {
    super(context);
  }

  @Override
  public String getName() {
    return TAG;
  }

  @Override
  public void onCreate(ModuleRegistry moduleRegistry) {
    // Helpers are created here (not in the constructor) because they need the module registry / context.
    mCustomTabsResolver = new CustomTabsActivitiesHelper(moduleRegistry);
    mConnectionHelper = new CustomTabsConnectionHelper(getContext());
  }

  /**
   * Warms up the Custom Tabs service of {@code packageName} (or the preferred
   * browser when null/empty). Resolves with the package actually warmed up.
   */
  @ExpoMethod
  public void warmUpAsync(@Nullable String packageName, final Promise promise) {
    try {
      packageName = givenOrPreferredPackageName(packageName);
      mConnectionHelper.warmUp(packageName);
      Bundle result = new Bundle();
      result.putString(SERVICE_PACKAGE_KEY, packageName);
      promise.resolve(result);
    } catch (NoPreferredPackageFound ex) {
      promise.reject(ex);
    }
  }

  /**
   * Unbinds from the Custom Tabs service of {@code packageName} (or the preferred
   * browser). Resolves with the package when a connection was actually released,
   * otherwise with an empty bundle.
   */
  @ExpoMethod
  public void coolDownAsync(@Nullable String packageName, final Promise promise) {
    try {
      packageName = givenOrPreferredPackageName(packageName);
      if (mConnectionHelper.coolDown(packageName)) {
        Bundle result = new Bundle();
        result.putString(SERVICE_PACKAGE_KEY, packageName);
        promise.resolve(result);
      } else {
        promise.resolve(new Bundle());
      }
    } catch (NoPreferredPackageFound ex) {
      promise.reject(ex);
    }
  }

  /**
   * Hints the Custom Tabs service that {@code url} is likely to be opened soon,
   * letting the browser pre-fetch it. Resolves with the service package used.
   */
  @ExpoMethod
  public void mayInitWithUrlAsync(@Nullable final String url, String packageName, final Promise promise) {
    try {
      packageName = givenOrPreferredPackageName(packageName);
      mConnectionHelper.mayInitWithUrl(packageName, Uri.parse(url));
      Bundle result = new Bundle();
      result.putString(SERVICE_PACKAGE_KEY, packageName);
      promise.resolve(result);
    } catch (NoPreferredPackageFound ex) {
      promise.reject(ex);
    }
  }

  /**
   * Resolves with the lists of packages supporting Custom Tabs activities and
   * services, plus the preferred and default browser packages.
   */
  @ExpoMethod
  public void getCustomTabsSupportingBrowsersAsync(final Promise promise) {
    try {
      ArrayList<String> activities = mCustomTabsResolver.getCustomTabsResolvingActivities();
      ArrayList<String> services = mCustomTabsResolver.getCustomTabsResolvingServices();
      String preferredPackage = mCustomTabsResolver.getPreferredCustomTabsResolvingActivity(activities);
      String defaultPackage = mCustomTabsResolver.getDefaultCustomTabsResolvingActivity();

      String defaultCustomTabsPackage = null;
      if (activities.contains(defaultPackage)) { // It might happen, that default activity does not support Chrome Tabs. Then it will be ResolvingActivity and we don't want to return it as a result.
        defaultCustomTabsPackage = defaultPackage;
      }

      Bundle result = new Bundle();
      result.putStringArrayList(BROWSER_PACKAGES_KEY, activities);
      result.putStringArrayList(SERVICE_PACKAGES_KEY, services);
      result.putString(PREFERRED_BROWSER_PACKAGE, preferredPackage);
      result.putString(DEFAULT_BROWSER_PACKAGE, defaultCustomTabsPackage);

      promise.resolve(result);
    } catch (CurrentActivityNotFoundException | PackageManagerNotFoundException ex) {
      promise.reject(ex);
    }
  }

  /**
   * Opens {@code url} in a Custom Tab configured from {@code arguments}.
   * Resolves with {@code {type: "opened"}} on success; rejects when no activity
   * can handle the intent or no current activity/package manager is available.
   */
  @ExpoMethod
  public void openBrowserAsync(final String url, ReadableArguments arguments, final Promise promise) {
    Intent intent = createCustomTabsIntent(arguments);
    intent.setData(Uri.parse(url));

    try {
      List<ResolveInfo> activities = mCustomTabsResolver.getResolvingActivities(intent);
      if (activities.size() > 0) {
        mCustomTabsResolver.startCustomTabs(intent);
        Bundle result = new Bundle();
        result.putString("type", "opened");
        promise.resolve(result);
      } else {
        promise.reject(ERROR_CODE, "No matching activity!");
      }
    } catch (CurrentActivityNotFoundException | PackageManagerNotFoundException ex) {
      promise.reject(ex);
    }
  }

  /**
   * Builds the Custom Tabs intent from the JS-side options: toolbar color,
   * title visibility, share menu item, URL-bar collapsing, target package,
   * and recents/history flags.
   */
  private Intent createCustomTabsIntent(ReadableArguments arguments) {
    CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
    String color = arguments.getString(TOOLBAR_COLOR_KEY);
    String packageName = arguments.getString(BROWSER_PACKAGE_KEY);
    try {
      if (!TextUtils.isEmpty(color)) {
        int intColor = Color.parseColor(color);
        builder.setToolbarColor(intColor);
      }
    } catch (IllegalArgumentException ignored) {
      // An unparsable color string is silently ignored: the tab opens with the default toolbar color.
    }

    builder.setShowTitle(arguments.getBoolean(SHOW_TITLE_KEY, false));

    if (arguments.containsKey(DEFAULT_SHARE_MENU_ITEM) && arguments.getBoolean(DEFAULT_SHARE_MENU_ITEM)) {
      builder.addDefaultShareMenuItem();
    }

    Intent intent = builder.build().intent;

    // We cannot use builder's method enableUrlBarHiding, because there is no corresponding disable method and some browsers enable it by default.
    intent.putExtra(CustomTabsIntent.EXTRA_ENABLE_URLBAR_HIDING, arguments.getBoolean(ENABLE_BAR_COLLAPSING_KEY, false));

    if (!TextUtils.isEmpty(packageName)) {
      intent.setPackage(packageName);
    }

    intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    if (!arguments.getBoolean(SHOW_IN_RECENTS, false)) {
      // Keep the tab out of the recents screen and drop it from history when not requested.
      intent.addFlags(Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
      intent.addFlags(Intent.FLAG_ACTIVITY_NO_HISTORY);
    }

    return intent;
  }

  /**
   * Returns {@code packageName} when non-empty, otherwise falls back to the
   * preferred Custom-Tabs-resolving activity.
   *
   * @throws NoPreferredPackageFound when neither a given nor a preferred package
   *                                 can be determined
   */
  private String givenOrPreferredPackageName(@Nullable String packageName) throws NoPreferredPackageFound {
    try {
      if (TextUtils.isEmpty(packageName)) {
        packageName = mCustomTabsResolver.getPreferredCustomTabsResolvingActivity(null);
      }
    } catch (CurrentActivityNotFoundException | PackageManagerNotFoundException ex) {
      // Resolution failures are reported uniformly as "no preferred package".
      throw new NoPreferredPackageFound(NO_PREFERRED_PACKAGE_MSG);
    }
    if (TextUtils.isEmpty(packageName)) {
      throw new NoPreferredPackageFound(NO_PREFERRED_PACKAGE_MSG);
    }

    return packageName;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.exception.SQLExceptionCode.CANNOT_DROP_PK;
import static org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MUTATE_TABLE;
import static org.apache.phoenix.exception.SQLExceptionCode.TABLE_UNDEFINED;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.KEY_SEQ;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ORDINAL_POSITION;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_TABLE;
import static org.apache.phoenix.schema.PTableType.SYSTEM;
import static org.apache.phoenix.schema.PTableType.TABLE;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import org.apache.hadoop.hbase.client.Admin;
import org.apache.phoenix.coprocessor.TaskRegionObserver;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.ColumnAlreadyExistsException;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.TableAlreadyExistsException;
import org.apache.phoenix.schema.TableNotFoundException;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.StringUtil;
import org.junit.Test;

/**
 * End-to-end tests for DDL on tenant-specific tables/views: creation rules,
 * column add/drop restrictions, parent-table drop semantics (with and without
 * CASCADE), and tenant-scoped metadata visibility.
 */
public class TenantSpecificTablesDDLIT extends BaseTenantSpecificTablesIT {

    /** A tenant-specific table must not get its own physical HBase table. */
    @Test
    public void testCreateTenantSpecificTable() throws Exception {
        // ensure we didn't create a physical HBase table for the tenant-specific table
        try (Connection conn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES))) {
            Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
            assertEquals(0, admin.listTables(TENANT_TABLE_NAME).length);
        }
    }

    /** Re-running the tenant table DDL must fail with TableAlreadyExistsException. */
    @Test
    public void testCreateTenantTableTwice() throws Exception {
        try {
            Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
            Connection conn = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
            conn.createStatement().execute(TENANT_TABLE_DDL);
            fail();
        }
        catch (TableAlreadyExistsException expected) {}
    }

    /** A tenant view over a non-multi-tenant base table cannot resolve its base. */
    @Test
    public void testCreateTenantViewFromNonMultiTenant() throws Exception {
        String tableName = generateUniqueName();
        createTestTable(getUrl(), "CREATE TABLE " + tableName + " (K VARCHAR PRIMARY KEY)");
        try {
            String viewName = generateUniqueName();
            // Only way to get this exception is to attempt to derive from a global, multi-type table, as we won't find
            // a tenant-specific table when we attempt to resolve the base table.
            createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL,
                    "CREATE VIEW " + viewName + " (COL VARCHAR) AS SELECT * FROM " + tableName);
        }
        catch (TableNotFoundException expected) {
        }
    }

    /** MULTI_TENANT cannot be toggled on a table that already has views. */
    @Test
    public void testAlteringMultiTenancyForTableWithViewsNotAllowed() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        String multiTenantTable = "MT_" + generateUniqueName();
        String globalTable = "G_" + generateUniqueName();
        // create the two base tables
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            String ddl = "CREATE TABLE " + multiTenantTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            conn.createStatement().execute(ddl);
            ddl = "CREATE TABLE " + globalTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) ";
            conn.createStatement().execute(ddl);
        }
        String t1 = generateUniqueName();
        props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, t1);
        // create view on multi-tenant table
        try (Connection tenantConn = DriverManager.getConnection(getUrl(), props)) {
            String viewName = "V_" + generateUniqueName();
            String viewDDL = "CREATE VIEW " + viewName + " AS SELECT * FROM " + multiTenantTable;
            tenantConn.createStatement().execute(viewDDL);
        }
        props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        // create view on global table
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            String viewName = "V_" + generateUniqueName();
            conn.createStatement().execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + globalTable);
        }
        props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            // Flipping MULTI_TENANT either way must be rejected once views exist.
            try {
                conn.createStatement().execute("ALTER TABLE " + globalTable + " SET MULTI_TENANT = " + true);
                fail();
            } catch (SQLException e) {
                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
            }
            try {
                conn.createStatement().execute("ALTER TABLE " + multiTenantTable + " SET MULTI_TENANT = " + false);
                fail();
            } catch (SQLException e) {
                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
            }
        }
    }

    /** A tenant-specific connection must not be able to drop the parent table. */
    @Test(expected=TableNotFoundException.class)
    public void testDeletionOfParentTableFailsOnTenantSpecificConnection() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, TENANT_ID); // connection is tenant-specific
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.createStatement().execute("DROP TABLE " + PARENT_TABLE_NAME);
        conn.close();
    }

    /** A tenant-specific connection must not be able to create base tables. */
    @Test // FIX: annotation was missing, so this test method was silently never executed
    public void testCreationOfParentTableFailsOnTenantSpecificConnection() throws Exception {
        try {
            createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE TABLE " + generateUniqueName() + "( \n" +
                    " \"user\" VARCHAR ,\n" +
                    " id INTEGER not null primary key desc\n" +
                    " ) ");
            fail();
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.CANNOT_CREATE_TENANT_SPECIFIC_TABLE.getErrorCode(), e.getErrorCode());
        }
    }

    /** Tenant view and parent table may live in different schemas; base resolution is schema-qualified. */
    @Test
    public void testTenantSpecificAndParentTablesMayBeInDifferentSchemas() throws SQLException {
        String fullTableName = "DIFFSCHEMA." + generateUniqueName();
        createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE VIEW " + fullTableName + " ( \n" +
                " tenant_col VARCHAR) AS SELECT * \n" +
                " FROM " + PARENT_TABLE_NAME + " WHERE tenant_type_id = 'aaa'");
        // Base table does not exist in DIFFSCHEMA yet, so this must fail.
        try {
            createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE VIEW " + fullTableName + "( \n" +
                    " tenant_col VARCHAR) AS SELECT *\n"+
                    " FROM DIFFSCHEMA." + PARENT_TABLE_NAME + " WHERE tenant_type_id = 'aaa'");
            fail();
        }
        catch (SQLException expected) {
            assertEquals(TABLE_UNDEFINED.getErrorCode(), expected.getErrorCode());
        }
        String newDDL =
                "CREATE TABLE DIFFSCHEMA." + PARENT_TABLE_NAME + " ( \n" +
                " \"user\" VARCHAR ,\n" +
                " tenant_id VARCHAR(5) NOT NULL,\n" +
                " tenant_type_id VARCHAR(3) NOT NULL, \n" +
                " id INTEGER NOT NULL\n" +
                " CONSTRAINT pk PRIMARY KEY (tenant_id, tenant_type_id, id)) MULTI_TENANT=true";
        createTestTable(getUrl(), newDDL);
        // Once the schema-qualified base exists, the same view DDL succeeds.
        createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE VIEW " + fullTableName + "( \n" +
                " tenant_col VARCHAR) AS SELECT *\n"+
                " FROM DIFFSCHEMA." + PARENT_TABLE_NAME + " WHERE tenant_type_id = 'aaa'");
    }

    /** A tenant view may declare an extra PK column of its own. */
    @Test
    public void testTenantSpecificTableCanDeclarePK() throws SQLException {
        createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE VIEW " + generateUniqueName() + "( \n" +
                " tenant_col VARCHAR PRIMARY KEY) AS SELECT *\n" +
                " FROM " + PARENT_TABLE_NAME);
    }

    /** A tenant view may not redeclare a parent column with a different type. */
    @Test(expected=ColumnAlreadyExistsException.class)
    public void testTenantSpecificTableCannotOverrideParentCol() throws SQLException {
        createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL, "CREATE VIEW " + generateUniqueName() + " ( \n" +
                " \"user\" INTEGER) AS SELECT *\n" +
                " FROM " + PARENT_TABLE_NAME);
    }

    /** MULTI_TENANT tables need at least two PK columns (tenant id + one more). */
    @Test
    public void testBaseTableWrongFormatWithTenantTypeId() throws Exception {
        // only two PK columns for multi_tenant, multi_type
        try {
            createTestTable(getUrl(), "CREATE TABLE " + generateUniqueName() +
                    "(TENANT_ID VARCHAR NOT NULL PRIMARY KEY, ID VARCHAR, A INTEGER) MULTI_TENANT=true");
            fail();
        }
        catch (SQLException expected) {
            assertEquals(SQLExceptionCode.INSUFFICIENT_MULTI_TENANT_COLUMNS.getErrorCode(), expected.getErrorCode());
        }
    }

    /** Add then drop a tenant-view column; data stays, dropped column disappears. */
    @Test
    public void testAddDropColumn() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
        conn.setAutoCommit(true);
        try {
            conn.createStatement().execute("upsert into " + TENANT_TABLE_NAME + " (id, tenant_col) values (1, 'Viva Las Vegas')");
            conn.createStatement().execute("alter view " + TENANT_TABLE_NAME + " add tenant_col2 char(1) null");
            conn.createStatement().execute("upsert into " + TENANT_TABLE_NAME + " (id, tenant_col2) values (2, 'a')");

            ResultSet rs = conn.createStatement().executeQuery("select count(*) from " + TENANT_TABLE_NAME);
            rs.next();
            assertEquals(2, rs.getInt(1));

            rs = conn.createStatement().executeQuery("select count(*) from " + TENANT_TABLE_NAME + " where tenant_col2 = 'a'");
            rs.next();
            assertEquals(1, rs.getInt(1));

            conn.createStatement().execute("alter view " + TENANT_TABLE_NAME + " drop column tenant_col");
            rs = conn.createStatement().executeQuery("select count(*) from " + TENANT_TABLE_NAME + "");
            rs.next();
            assertEquals(2, rs.getInt(1));

            // Selecting the dropped column must now fail.
            try {
                rs = conn.createStatement().executeQuery("select tenant_col from " + TENANT_TABLE_NAME);
                fail();
            }
            catch (ColumnNotFoundException expected) {}
        }
        finally {
            conn.close();
        }
    }

    /** PK columns of a tenant view cannot be dropped. */
    @Test
    public void testDropOfPKInTenantTablesNotAllowed() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
        try {
            // try removing a PK col
            try {
                conn.createStatement().execute("alter table " + TENANT_TABLE_NAME + " drop column id");
                fail();
            }
            catch (SQLException expected) {
                assertEquals(CANNOT_DROP_PK.getErrorCode(), expected.getErrorCode());
            }
        }
        finally {
            conn.close();
        }
    }

    /** On the parent: dropping PK cols is rejected, dropping non-PK cols is allowed. */
    @Test
    public void testColumnMutationInParentTableWithExistingTenantTable() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            try {
                conn.createStatement().execute("alter table " + PARENT_TABLE_NAME + " drop column id");
                fail();
            }
            catch (SQLException expected) {
                assertEquals(CANNOT_DROP_PK.getErrorCode(), expected.getErrorCode());
            }
            // try removing a non-PK col, which is allowed
            try {
                conn.createStatement().execute("alter table " + PARENT_TABLE_NAME + " drop column \"user\"");
            }
            catch (SQLException expected) {
                fail("We should be able to drop a non pk base table column");
            }
        }
        finally {
            conn.close();
        }
    }

    /** Dropping a parent table without CASCADE must fail while tenant views exist. */
    @Test
    public void testDisallowDropParentTableWithExistingTenantTable() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            conn.createStatement().executeUpdate("drop table " + PARENT_TABLE_NAME);
            fail("Should not have been allowed to drop a parent table to which tenant-specific tables still point.");
        }
        catch (SQLException expected) {
            assertEquals(CANNOT_MUTATE_TABLE.getErrorCode(), expected.getErrorCode());
        }
        finally {
            conn.close();
        }
    }

    /** DROP ... CASCADE removes the parent and its single tenant view. */
    @Test
    public void testAllowDropParentTableWithCascadeAndSingleTenantTable() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        Connection connTenant = null;
        try {
            // Drop Parent Table
            conn.createStatement().executeUpdate("DROP TABLE " + PARENT_TABLE_NAME + " CASCADE");
            connTenant = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
            // FIX: validation must run against the tenant-specific connection (was passed the
            // global `conn`, leaving `connTenant` unused and the tenant view never checked).
            validateTenantViewIsDropped(connTenant);
        } finally {
            if (conn != null) {
                conn.close();
            }
            if (connTenant != null) {
                connTenant.close();
            }
        }
    }

    /** DROP ... CASCADE removes views of ALL tenants, but only of the dropped parent. */
    @Test
    public void testAllDropParentTableWithCascadeWithMultipleTenantTablesAndIndexes() throws Exception {
        // Create a second tenant table
        String tenantTable2 = "V_" + generateUniqueName();
        createTestTable(PHOENIX_JDBC_TENANT_SPECIFIC_URL2, TENANT_TABLE_DDL.replace(TENANT_TABLE_NAME, tenantTable2));
        //TODO Create some tenant specific table indexes

        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = null;
        Connection connTenant1 = null;
        Connection connTenant2 = null;
        try {
            conn = DriverManager.getConnection(getUrl(), props);
            DatabaseMetaData meta = conn.getMetaData();
            // Both tenants' views exist before the drop.
            ResultSet rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertEquals(TENANT_ID, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertTableMetaData(rs, null, TENANT_TABLE_NAME, PTableType.VIEW);
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(tenantTable2), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertEquals(TENANT_ID2, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertTableMetaData(rs, null, tenantTable2, PTableType.VIEW);
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME_NO_TENANT_TYPE_ID), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertEquals(TENANT_ID, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertTableMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, PTableType.VIEW);
            assertFalse(rs.next());

            // Drop Parent Table
            conn.createStatement().executeUpdate("DROP TABLE " + PARENT_TABLE_NAME + " CASCADE");
            // Run the self-healing task so the cascade cleanup is applied synchronously.
            TaskRegionObserver.SelfHealingTask task =
                    new TaskRegionObserver.SelfHealingTask(
                            TaskRegionEnvironment, QueryServicesOptions.DEFAULT_TASK_HANDLING_MAX_INTERVAL_MS);
            task.run();

            // Validate Tenant Views are dropped
            connTenant1 = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
            validateTenantViewIsDropped(connTenant1);
            connTenant2 = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL2, props);
            validateTenantViewIsDropped(connTenant2);

            // Validate Tenant Metadata is gone for the Tenant Table TENANT_TABLE_NAME
            rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME), new String[] {PTableType.VIEW.getValue().getString()});
            assertFalse(rs.next());
            rs = meta.getTables(null, "", StringUtil.escapeLike(tenantTable2), new String[] {PTableType.VIEW.getValue().getString()});
            assertFalse(rs.next());

            // The view over the OTHER parent table must survive the cascade.
            rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME_NO_TENANT_TYPE_ID), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertEquals(TENANT_ID, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertTableMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, PTableType.VIEW);
            assertFalse(rs.next());
        } finally {
            if (conn != null) {
                conn.close();
            }
            if (connTenant1 != null) {
                connTenant1.close();
            }
            if (connTenant2 != null) {
                connTenant2.close();
            }
        }
    }

    /**
     * Asserts that TENANT_TABLE_NAME is no longer resolvable on {@code connTenant},
     * either via the schema cache bypass or via DROP VIEW.
     */
    private void validateTenantViewIsDropped(Connection connTenant) throws SQLException {
        try {
            PhoenixRuntime.getTableNoCache(connTenant, TENANT_TABLE_NAME);
            fail("Tenant specific view " + TENANT_TABLE_NAME
                    + " should have been dropped when parent was dropped");
        } catch (TableNotFoundException e) {
            //Expected
        }
        // Try and drop tenant view, should throw TableNotFoundException
        try {
            String ddl = "DROP VIEW " + TENANT_TABLE_NAME;
            connTenant.createStatement().execute(ddl);
            fail("Tenant specific view " + TENANT_TABLE_NAME
                    + " should have been dropped when parent was dropped");
        } catch (TableNotFoundException e) {
            //Expected
        }
    }

    /** Metadata scans: global connections see only global objects, tenants see theirs + global. */
    @Test
    public void testTableMetadataScan() throws Exception {
        // create a tenant table with same name for a different tenant to make sure we are not picking it up in metadata scans for TENANT_ID
        String tenantId2 = "T_" + generateUniqueName();
        String secondTenatConnectionURL = PHOENIX_JDBC_TENANT_SPECIFIC_URL.replace(TENANT_ID, tenantId2);
        String tenantTable2 = "V_" + generateUniqueName();
        createTestTable(secondTenatConnectionURL, TENANT_TABLE_DDL.replace(TENANT_TABLE_NAME, tenantTable2));

        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            // empty string means global tenant id
            // make sure connections w/o tenant id only see non-tenant-specific tables, both SYSTEM and USER
            DatabaseMetaData meta = conn.getMetaData();
            ResultSet rs = meta.getTables("", "", StringUtil.escapeLike(PARENT_TABLE_NAME), new String[] {TABLE.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, PARENT_TABLE_NAME, TABLE);
            assertFalse(rs.next());
            rs = meta.getTables("", "", StringUtil.escapeLike(PARENT_TABLE_NAME_NO_TENANT_TYPE_ID), new String[] {TABLE.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, PARENT_TABLE_NAME_NO_TENANT_TYPE_ID, TABLE);
            assertFalse(rs.next());

            // make sure connections w/o tenant id only see non-tenant-specific columns
            rs = meta.getColumns("", null, null, null);
            while (rs.next()) {
                assertNotEquals(TENANT_TABLE_NAME, rs.getString("TABLE_NAME"));
                assertNotEquals(tenantTable2, rs.getString("TABLE_NAME"));
            }

            List<String> sortedTableNames = Arrays.asList(TENANT_TABLE_NAME, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID);
            Collections.sort(sortedTableNames);
            // getSuperTables returns rows sorted by table name; align parents accordingly.
            List<String> sortedParentNames;
            if (sortedTableNames.get(0).equals(TENANT_TABLE_NAME)) {
                sortedParentNames = Arrays.asList(PARENT_TABLE_NAME, PARENT_TABLE_NAME_NO_TENANT_TYPE_ID);
            } else {
                sortedParentNames = Arrays.asList(PARENT_TABLE_NAME_NO_TENANT_TYPE_ID, PARENT_TABLE_NAME);
            }
            rs = meta.getSuperTables(TENANT_ID, null, null);
            assertTrue(rs.next());
            assertEquals(TENANT_ID, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertEquals(sortedTableNames.get(0), rs.getString(PhoenixDatabaseMetaData.TABLE_NAME));
            assertEquals(sortedParentNames.get(0), rs.getString(PhoenixDatabaseMetaData.SUPERTABLE_NAME));
            assertTrue(rs.next());
            assertEquals(TENANT_ID, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertEquals(sortedTableNames.get(1), rs.getString(PhoenixDatabaseMetaData.TABLE_NAME));
            assertEquals(sortedParentNames.get(1), rs.getString(PhoenixDatabaseMetaData.SUPERTABLE_NAME));
            assertFalse(rs.next());

            rs = meta.getSuperTables(tenantId2, null, null);
            assertTrue(rs.next());
            assertEquals(tenantId2, rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            assertEquals(tenantTable2, rs.getString(PhoenixDatabaseMetaData.TABLE_NAME));
            assertEquals(PARENT_TABLE_NAME, rs.getString(PhoenixDatabaseMetaData.SUPERTABLE_NAME));
            assertFalse(rs.next());

            // Every tenant id must show up as a catalog.
            Set<String> sortedCatalogs = new HashSet<>(Arrays.asList(TENANT_ID, tenantId2));
            rs = conn.getMetaData().getCatalogs();
            while (rs.next()) {
                sortedCatalogs.remove(rs.getString(PhoenixDatabaseMetaData.TABLE_CAT));
            }
            assertTrue("Should have found both tenant IDs", sortedCatalogs.isEmpty());
        } finally {
            props.clear();
            conn.close();
        }

        conn = DriverManager.getConnection(PHOENIX_JDBC_TENANT_SPECIFIC_URL, props);
        try {
            // make sure tenant-specific connections only see their own tables and the global tables
            DatabaseMetaData meta = conn.getMetaData();
            ResultSet rs = meta.getTables("", SYSTEM_CATALOG_SCHEMA, null, new String[] {PTableType.SYSTEM.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_CATALOG_TABLE, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_CHILD_LINK_TABLE, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, SYSTEM_CATALOG_SCHEMA, SYSTEM_FUNCTION_TABLE, SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_LOG_TABLE, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_MUTEX_TABLE_NAME, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.TYPE_SEQUENCE, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_STATS_TABLE, PTableType.SYSTEM);
            assertTrue(rs.next());
            assertTableMetaData(rs, SYSTEM_CATALOG_SCHEMA, PhoenixDatabaseMetaData.SYSTEM_TASK_TABLE, PTableType.SYSTEM);
            assertFalse(rs.next());

            // Another tenant's view must be invisible here.
            rs = meta.getTables(null, "", StringUtil.escapeLike(tenantTable2), new String[] {TABLE.getValue().getString()});
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(PARENT_TABLE_NAME), new String[] {TABLE.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, PARENT_TABLE_NAME, TABLE);
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(PARENT_TABLE_NAME_NO_TENANT_TYPE_ID), new String[] {TABLE.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, PARENT_TABLE_NAME_NO_TENANT_TYPE_ID, TABLE);
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, TENANT_TABLE_NAME, PTableType.VIEW);
            assertFalse(rs.next());

            rs = meta.getTables(null, "", StringUtil.escapeLike(TENANT_TABLE_NAME_NO_TENANT_TYPE_ID), new String[] {PTableType.VIEW.getValue().getString()});
            assertTrue(rs.next());
            assertTableMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, PTableType.VIEW);
            assertFalse(rs.next());

            // make sure tenants see parent table's columns and their own
            rs = meta.getColumns(null, null, StringUtil.escapeLike(TENANT_TABLE_NAME), null);
            assertTrue(rs.next());
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME, "\"user\"", 1);
            assertTrue(rs.next());
            // (tenant_id column is not visible in tenant-specific connection)
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME, "tenant_type_id", 2);
            assertEquals(1, rs.getShort(KEY_SEQ));
            assertTrue(rs.next());
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME, "id", 3);
            assertTrue(rs.next());
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME, "tenant_col", 4);
            assertFalse(rs.next());

            rs = meta.getColumns(null, null, StringUtil.escapeLike(TENANT_TABLE_NAME_NO_TENANT_TYPE_ID), null);
            assertTrue(rs.next());
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, "\"user\"", 1);
            assertTrue(rs.next());
            // (tenant_id column is not visible in tenant-specific connection)
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, "id", 2);
            assertTrue(rs.next());
            assertColumnMetaData(rs, null, TENANT_TABLE_NAME_NO_TENANT_TYPE_ID, "tenant_col", 3);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Asserts schema, name, and type of the metadata row under the cursor. */
    private void assertTableMetaData(ResultSet rs, String schema, String table, PTableType tableType) throws SQLException {
        assertEquals(schema, rs.getString("TABLE_SCHEM"));
        assertEquals(table, rs.getString("TABLE_NAME"));
        assertEquals(tableType.toString(), rs.getString("TABLE_TYPE"));
    }

    /** Asserts schema, table, and (normalized) column name of the current row. */
    private void assertColumnMetaData(ResultSet rs, String schema, String table, String column) throws SQLException {
        assertEquals(schema, rs.getString("TABLE_SCHEM"));
        assertEquals(table, rs.getString("TABLE_NAME"));
        assertEquals(SchemaUtil.normalizeIdentifier(column), rs.getString("COLUMN_NAME"));
    }

    /** Same as above, additionally checking the ORDINAL_POSITION. */
    private void assertColumnMetaData(ResultSet rs, String schema, String table, String column, int ordinalPosition)
            throws SQLException {
        assertColumnMetaData(rs, schema, table, column);
        assertEquals(ordinalPosition, rs.getInt(ORDINAL_POSITION));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.plugins.tree.impl;

import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.plugins.tree.TreeConstants;
import org.apache.jackrabbit.oak.plugins.tree.TreeType;
import org.apache.jackrabbit.oak.plugins.tree.TreeTypeProvider;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.plugins.tree.factories.RootFactory;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.util.NodeUtil;
import org.apache.jackrabbit.util.Text;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@code ImmutableTree}: path/name resolution, existence and status
 * of regular, hidden ({@code :index}-style) and non-existing children, hidden
 * properties (child order), child ordering, and tree-type resolution via
 * {@code TreeTypeProvider}.
 */
public class ImmutableTreeTest extends AbstractSecurityTest {

    // A hidden (oak-internal) path that exists in the default repo setup;
    // used to verify that ImmutableTree exposes hidden items.
    private static final String HIDDEN_PATH = "/oak:index/acPrincipalName/:index";

    private ImmutableTree immutable;

    @Before
    public void setUp() throws Exception {
        // Build /x/y/z plus an /orderable node with orderable children,
        // then snapshot the committed root state as an ImmutableTree.
        Tree tree = root.getTree("/");
        NodeUtil node = new NodeUtil(tree);
        node.addChild("x", JcrConstants.NT_UNSTRUCTURED).addChild("y", JcrConstants.NT_UNSTRUCTURED).addChild("z", JcrConstants.NT_UNSTRUCTURED);
        Tree orderable = node.addChild("orderable", JcrConstants.NT_UNSTRUCTURED).getTree();
        orderable.setOrderableChildren(true);
        root.commit();
        immutable = new ImmutableTree(((AbstractTree) root.getTree("/")).getNodeState());
    }

    @After
    public void tearDown() {
        root = null;
    }

    @Test
    public void testGetPath() {
        assertEquals("/", immutable.getPath());

        immutable = immutable.getChild("x");
        assertEquals("/x", immutable.getPath());

        immutable = immutable.getChild("y");
        assertEquals("/x/y", immutable.getPath());

        immutable = immutable.getChild("z");
        assertEquals("/x/y/z", immutable.getPath());
    }

    @Test
    public void testGetNodeState() {
        assertNotNull(immutable.getNodeState());

        for (Tree child : immutable.getChildren()) {
            assertTrue(child instanceof ImmutableTree);
            assertNotNull(((ImmutableTree) child).getNodeState());
        }
    }

    @Test
    public void testRootIsRoot() {
        assertTrue(immutable.isRoot());
    }

    // The root has no parent: asking for it is a programming error.
    @Test(expected = IllegalStateException.class)
    public void testRootGetParent() {
        immutable.getParent();
    }

    @Test
    public void testGetParent() {
        ImmutableTree child = immutable.getChild("x");
        assertNotNull(child.getParent());
        assertEquals("/", child.getParent().getPath());
    }

    // A tree built with ParentProvider.UNSUPPORTED cannot navigate upwards.
    @Test(expected = UnsupportedOperationException.class)
    public void testGetParentDisconnected() {
        ImmutableTree child = immutable.getChild("x");
        ImmutableTree disconnected = new ImmutableTree(ImmutableTree.ParentProvider.UNSUPPORTED, child.getName(), child.getNodeState());
        disconnected.getParent();
    }

    @Test
    public void testGetName() {
        assertEquals("x", immutable.getChild("x").getName());
    }

    @Test
    public void testHiddenGetName() {
        assertEquals(Text.getName(HIDDEN_PATH), getHiddenTree(immutable).getName());
    }

    // Even a non-existing child reports the requested name.
    @Test
    public void testNonExistingGetName() {
        assertEquals("nonExisting", immutable.getChild("nonExisting").getName());
    }

    @Test
    public void testRootGetName() {
        assertEquals("", immutable.getName());
    }

    @Test
    public void testExists() {
        ImmutableTree child = immutable.getChild("x");
        assertTrue(child.exists());
    }

    // Unlike a mutable Tree, an ImmutableTree exposes hidden items as existing.
    @Test
    public void testHiddenExists() {
        assertTrue(getHiddenTree(immutable).exists());
    }

    @Test
    public void testNonExisting() {
        ImmutableTree child = immutable.getChild("nonExisting");
        assertNotNull(child);
        assertFalse(child.exists());
    }

    // An immutable snapshot is always UNCHANGED, for any kind of item.
    @Test
    public void testRootGetStatus() {
        assertSame(Tree.Status.UNCHANGED, immutable.getStatus());
    }

    @Test
    public void testGetStatus() {
        assertSame(Tree.Status.UNCHANGED, immutable.getChild("x").getStatus());
    }

    @Test
    public void testHiddenGetStatus() {
        assertSame(Tree.Status.UNCHANGED, getHiddenTree(immutable).getStatus());
    }

    @Test
    public void testNonExistingGetStatus() {
        assertSame(Tree.Status.UNCHANGED, immutable.getChild("nonExisting").getStatus());
    }

    @Test
    public void testHasChild() {
        assertTrue(immutable.hasChild("x"));
    }

    @Test
    public void testHasHiddenChild() {
        ImmutableTree parent = (ImmutableTree) TreeUtil.getTree(immutable, Text.getRelativeParent(HIDDEN_PATH, 1));
        assertNotNull(parent);
        assertTrue(parent.hasChild(Text.getName(HIDDEN_PATH)));
    }

    @Test
    public void testGetHiddenNode() {
        ImmutableTree hidden = getHiddenTree(immutable);
        assertNotNull(hidden);
    }

    // :childOrder is a hidden property, but visible through ImmutableTree.
    @Test
    public void testHasHiddenProperty() {
        ImmutableTree orderable = immutable.getChild("orderable");
        assertTrue(orderable.hasProperty(TreeConstants.OAK_CHILD_ORDER));
    }

    @Test
    public void testGetHiddenProperty() {
        ImmutableTree orderable = immutable.getChild("orderable");
        assertNotNull(orderable.getProperty(TreeConstants.OAK_CHILD_ORDER));
    }

    @Test
    public void testGetPropertyStatus() {
        ImmutableTree orderable = immutable.getChild("orderable");
        assertSame(Tree.Status.UNCHANGED, orderable.getPropertyStatus(TreeConstants.OAK_CHILD_ORDER));
    }

    @Test
    public void testGetProperties() {
        ImmutableTree orderable = immutable.getChild("orderable");
        // Exactly :childOrder and jcr:primaryType are expected.
        List<String> propNames = Lists.newArrayList(TreeConstants.OAK_CHILD_ORDER, JcrConstants.JCR_PRIMARYTYPE);
        for (PropertyState ps : orderable.getProperties()) {
            assertTrue(propNames.remove(ps.getName()));
        }
        assertEquals(2, orderable.getPropertyCount());
    }

    @Test
    public void testGetPropertyCount() {
        ImmutableTree orderable = immutable.getChild("orderable");
        assertEquals(2, orderable.getPropertyCount());
    }

    // Verifies that child reordering done on the mutable tree is reflected in
    // fresh immutable snapshots taken after each commit.
    @Test
    public void orderBefore() throws Exception {
        Tree t = root.getTree("/x/y/z");

        NodeUtil n = new NodeUtil(t);
        n.addChild("node1", JcrConstants.NT_UNSTRUCTURED);
        n.addChild("node2", JcrConstants.NT_UNSTRUCTURED);
        n.addChild("node3", JcrConstants.NT_UNSTRUCTURED);
        t.getChild("node1").orderBefore("node2");
        t.getChild("node3").orderBefore(null);
        root.commit();

        ImmutableTree tree = new ImmutableTree(((AbstractTree) t).getNodeState());
        assertSequence(tree.getChildren(), "node1", "node2", "node3");

        t.getChild("node3").orderBefore("node2");
        root.commit();
        tree = new ImmutableTree(((AbstractTree) t).getNodeState());
        assertSequence(tree.getChildren(), "node1", "node3", "node2");

        t.getChild("node1").orderBefore(null);
        root.commit();
        tree = new ImmutableTree(((AbstractTree) t).getNodeState());
        assertSequence(tree.getChildren(), "node3", "node2", "node1");
    }

    // Resolves HIDDEN_PATH below the given tree.
    private static ImmutableTree getHiddenTree(@Nonnull ImmutableTree immutable) {
        return (ImmutableTree) TreeUtil.getTree(immutable, HIDDEN_PATH);
    }

    // Asserts that the iterable yields children with exactly the given names,
    // in order.
    private static void assertSequence(Iterable<Tree> trees, String... names) {
        List<String> actual = Lists.newArrayList(Iterables.transform(trees, new Function<Tree, String>() {
            @Nullable
            @Override
            public String apply(Tree input) {
                return input.getName();
            }
        }));
        assertEquals(Lists.newArrayList(names), actual);
    }

    @Test
    public void testSetType() {
        assertNull(immutable.getType());

        immutable.setType(TreeType.VERSION);
        assertSame(TreeType.VERSION, immutable.getType());

        immutable.setType(TreeType.DEFAULT);
        assertSame(TreeType.DEFAULT, immutable.getType());
    }

    @Test
    public void testGetTypeForImmutableTree() {
        TreeTypeProvider typeProvider = new TreeTypeProvider(getConfig(AuthorizationConfiguration.class).getContext());
        for (String path : new String[] {"/", "/testPath"}) {
            Tree t = RootFactory.createReadOnlyRoot(root).getTree(path);
            assertEquals(TreeType.DEFAULT, typeProvider.getType(t));
            // also for repeated calls
            assertEquals(TreeType.DEFAULT, typeProvider.getType(t));

            // the type of an immutable tree is set after the first call irrespective of the passed parent type.
            assertEquals(TreeType.DEFAULT, typeProvider.getType(t, TreeType.DEFAULT));
            assertEquals(TreeType.DEFAULT, typeProvider.getType(t, TreeType.HIDDEN));
        }
    }

    @Test
    public void testGetTypeForImmutableTreeWithParent() {
        TreeTypeProvider typeProvider = new TreeTypeProvider(getConfig(AuthorizationConfiguration.class).getContext());

        Tree t = RootFactory.createReadOnlyRoot(root).getTree("/:hidden/testPath");
        assertEquals(TreeType.HIDDEN, typeProvider.getType(t, TreeType.HIDDEN));

        // the type of an immutable tree is set after the first call irrespective of the passed parent type.
        assertEquals(TreeType.HIDDEN, typeProvider.getType(t));
        assertEquals(TreeType.HIDDEN, typeProvider.getType(t, TreeType.DEFAULT));
        assertEquals(TreeType.HIDDEN, typeProvider.getType(t, TreeType.ACCESS_CONTROL));
        assertEquals(TreeType.HIDDEN, typeProvider.getType(t, TreeType.VERSION));
    }
}
package org.motechproject.nms.api.web;

import org.motechproject.commons.date.util.DateUtil;
import org.motechproject.nms.api.web.contract.FlwUserResponse;
import org.motechproject.nms.api.web.contract.UserResponse;
import org.motechproject.nms.api.web.contract.kilkari.KilkariUserResponse;
import org.motechproject.nms.api.web.domain.AnonymousCallAudit;
import org.motechproject.nms.api.web.domain.InactiveJobCallAudit;
import org.motechproject.nms.api.web.exception.NotAuthorizedException;
import org.motechproject.nms.api.web.exception.NotDeployedException;
import org.motechproject.nms.api.web.repository.AnonymousCallAuditDataService;
import org.motechproject.nms.api.web.repository.InactiveJobCallAuditDataService;
import org.motechproject.nms.flw.domain.FrontLineWorker;
import org.motechproject.nms.flw.domain.FrontLineWorkerStatus;
import org.motechproject.nms.flw.domain.ServiceUsage;
import org.motechproject.nms.flw.domain.ServiceUsageCap;
import org.motechproject.nms.flw.domain.FlwJobStatus;
import org.motechproject.nms.flw.service.FrontLineWorkerService;
import org.motechproject.nms.flw.service.ServiceUsageCapService;
import org.motechproject.nms.flw.service.ServiceUsageService;
import org.motechproject.nms.kilkari.domain.MctsChild;
import org.motechproject.nms.kilkari.domain.MctsMother;
import org.motechproject.nms.kilkari.domain.Subscriber;
import org.motechproject.nms.kilkari.domain.Subscription;
import org.motechproject.nms.kilkari.domain.SubscriptionStatus;
import org.motechproject.nms.kilkari.service.SubscriberService;
import org.motechproject.nms.props.domain.Service;
import org.motechproject.nms.props.service.LogHelper;
import org.motechproject.nms.region.domain.Circle;
import org.motechproject.nms.region.domain.Language;
import org.motechproject.nms.region.domain.State;
import org.motechproject.nms.region.service.CircleService;
import org.motechproject.nms.region.service.LanguageService;
import org.motechproject.nms.region.service.StateService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * REST controller answering the IVR "get user details" call for the
 * Mobile Academy, Mobile Kunji and Kilkari services. Resolves the caller's
 * language, allowed languages for the circle, and service-specific state
 * (subscription packs for Kilkari, usage/caps for the FLW services).
 */
@Controller
public class UserController extends BaseController {

    public static final String SERVICE_NAME = "serviceName";

    @Autowired
    private SubscriberService subscriberService;

    @Autowired
    private FrontLineWorkerService frontLineWorkerService;

    @Autowired
    private ServiceUsageService serviceUsageService;

    @Autowired
    private ServiceUsageCapService serviceUsageCapService;

    @Autowired
    private CircleService circleService;

    @Autowired
    private LanguageService languageService;

    @Autowired
    private StateService stateService;

    @Autowired
    private AnonymousCallAuditDataService anonymousCallAuditDataService;

    @Autowired
    private InactiveJobCallAuditDataService inactiveJobCallAuditDataService;

    /**
     * 2.2.1 Get User Details API
     * IVR shall invoke this API when to retrieve details specific to the user identified by callingNumber.
     * In case user specific details are not available in the database, the API will attempt to load system
     * defaults based on the operator and circle provided.
     * /api/mobileacademy/user?callingNumber=9999999900&operator=A&circle=AP&callId=123456789012345
     *
     * 3.2.1 Get User Details API
     * IVR shall invoke this API when to retrieve details specific to the user identified by callingNumber.
     * In case user specific details are not available in the database, the API will attempt to load system
     * defaults based on the operator and circle provided.
     * /api/mobilekunji/user?callingNumber=9999999900&operator=A&circle=AP&callId=234000011111111
     *
     */
    // noRollbackFor: the audit records written before a NotAuthorizedException
    // is thrown must still be committed.
    @RequestMapping("/{serviceName}/user") // NO CHECKSTYLE Cyclomatic Complexity
    @ResponseBody
    @Transactional(noRollbackFor = NotAuthorizedException.class)
    public UserResponse getUserDetails(@PathVariable String serviceName,
                             @RequestParam(required = false) Long callingNumber,
                             @RequestParam(required = false) String operator,
                             @RequestParam(required = false) String circle,
                             @RequestParam(required = false) String callId) {
        log(String.format("REQUEST: /%s/user", serviceName), String.format(
            "callingNumber=%s, callId=%s, operator=%s, circle=%s",
            LogHelper.obscure(callingNumber), callId, operator, circle));

        // Basic request-parameter validation (presence/format) happens first.
        StringBuilder failureReasons = validate(callingNumber, callId, operator, circle);
        if (failureReasons.length() > 0) {
            throw new IllegalArgumentException(failureReasons.toString());
        }

        Circle circleObj = circleService.getByName(circle);

        UserResponse user = null;

        /*
        Make sure the url the user hit corresponds to a service we are expecting
         */
        if (!(MOBILE_ACADEMY.equals(serviceName) || MOBILE_KUNJI.equals(serviceName) ||
                KILKARI.equals(serviceName))) {
            failureReasons.append(String.format(INVALID, SERVICE_NAME));
        }

        if (failureReasons.length() > 0) {
            throw new IllegalArgumentException(failureReasons.toString());
        }

        /*
        Handle the FLW services
         */
        if (MOBILE_ACADEMY.equals(serviceName) || MOBILE_KUNJI.equals(serviceName)) {
            user = getFrontLineWorkerResponseUser(serviceName, callingNumber, circleObj);
        }

        /*
        Kilkari in the house!
         */
        if (KILKARI.equals(serviceName)) {
            if (!validateKilkariServiceAvailability(callingNumber, circleObj)) {
                throw new NotDeployedException(String.format(NOT_DEPLOYED, Service.KILKARI));
            }
            user = getKilkariResponseUser(callingNumber);
        }

        Language defaultLanguage = null;
        if (circleObj != null) {
            defaultLanguage = circleObj.getDefaultLanguage();
        }

        // If no circle was provided, or if the provided circle doesn't have a default language, use the national
        if (defaultLanguage == null) {
            defaultLanguage = languageService.getNationalDefaultLanguage();
        }

        if (defaultLanguage != null && user != null) {
            user.setDefaultLanguageLocationCode(defaultLanguage.getCode());
        }

        // If the user does not have a language location code we want to return the allowed language location
        // codes for the provided circle, or all if no circle was provided
        // NOTE(review): user is dereferenced without a null check below even
        // though the setter above guards on user != null — by this point the
        // serviceName has been validated so one of the two branches above
        // always assigned it; the earlier guard is just defensive.
        Set<Language> languages = new HashSet<>();
        if (user.getLanguageLocationCode() == null && circleObj != null) {
            languages = languageService.getAllForCircle(circleObj);
        }

        if (user.getLanguageLocationCode() == null && circleObj == null) {
            languages = languageService.getAll();
        }

        if (languages.size() > 0) {
            Set<String> allowedLanguageLocations = new HashSet<>();
            for (Language language : languages) {
                allowedLanguageLocations.add(language.getCode());
            }

            user.setAllowedLanguageLocationCodes(allowedLanguageLocations);
        }

        log(String.format("RESPONSE: /%s/user", serviceName), String.format("callId=%s, %s", callId, user.toString()));

        return user;
    }

    // Returns true when the Kilkari service is considered available for the
    // caller; resolution order: MCTS mother/child state -> subscriber circle
    // -> circle passed by IMI -> permissive default (true) when nothing known.
    private boolean validateKilkariServiceAvailability(Long callingNumber, Circle circle) { // NO CHECKSTYLE Cyclomatic Complexity
        List<Subscriber> subscribers = subscriberService.getSubscriber(callingNumber);
        // 1. Check for existing subscriber and if mcts data is available
        // 2. If not mcts data available, use circle information for existing subscriber
        // 3. If existing subscriber has no circle available or is new subscriber, use circle passed from imi
        Circle subscriberCircle = null;

        // check for subscriber and mcts location data
        if (!subscribers.isEmpty()) {
            for (Subscriber subscriber : subscribers) {
                MctsMother mother = subscriber.getMother();
                if (mother != null) {
                    return serviceDeployedInUserState(Service.KILKARI, mother.getState());
                }

                MctsChild child = subscriber.getChild();
                if (child != null) {
                    return serviceDeployedInUserState(Service.KILKARI, child.getState());
                }
                // No MCTS data: remember the (last) subscriber circle as fallback.
                subscriberCircle = subscriber.getCircle();
            }
        }

        // Try to validate from circle since we don't have MCTS data for state. Choose circle from subscriber or
        // passed from IMI as last resort
        Circle currentCircle = (!subscribers.isEmpty() && subscriberCircle != null) ? subscriberCircle : circle;

        if (currentCircle == null) { // No circle available
            return true;
        }

        Set<State> states = stateService.getAllInCircle(currentCircle);

        if (states == null || states.isEmpty()) { // No state available
            return true;
        }

        if (states.size() == 1) { // only one state available
            return serviceDeployedInUserState(Service.KILKARI, states.iterator().next());
        }

        for (State currentState : states) { // multiple states, false if undeployed in all states
            if (serviceDeployedInUserState(Service.KILKARI, currentState)) {
                return true;
            }
        }

        return false;
    }

    // Builds the Kilkari response: the union of ACTIVE / PENDING_ACTIVATION
    // subscription pack names across all subscribers for this number, plus
    // the (last seen) subscriber language.
    private UserResponse getKilkariResponseUser(Long callingNumber) {
        List<Subscriber> subscribers = subscriberService.getSubscriber(callingNumber);
        KilkariUserResponse kilkariUserResponse = new KilkariUserResponse();
        Set<String> packs = new HashSet<>();
        if (!subscribers.isEmpty()) {
            for (Subscriber subscriber : subscribers) {
                Set<Subscription> subscriptions = subscriber.getSubscriptions();
                for (Subscription subscription : subscriptions) {
                    if ((subscription.getStatus() == SubscriptionStatus.ACTIVE) ||
                            (subscription.getStatus() == SubscriptionStatus.PENDING_ACTIVATION)) {
                        packs.add(subscription.getSubscriptionPack().getName());
                    }
                }
                Language subscriberLanguage = subscriber.getLanguage();
                if (subscriberLanguage != null) {
                    kilkariUserResponse.setLanguageLocationCode(subscriberLanguage.getCode());
                }
            }
        }
        kilkariUserResponse.setSubscriptionPackList(packs);
        return kilkariUserResponse;
    }

    // Builds the FLW response for MA/MK: resolves the worker, checks service
    // deployment for their state (or circle), enforces MA-only anonymous and
    // inactive-job restrictions, then fills usage counters and caps.
    private UserResponse getFrontLineWorkerResponseUser(String serviceName, Long callingNumber, Circle circle) {
        FlwUserResponse user = new FlwUserResponse();

        Service service = getServiceFromName(serviceName);

        ServiceUsage serviceUsage = new ServiceUsage(null, service, 0, 0, false);
        FrontLineWorker flw = frontLineWorkerService.getByContactNumber(callingNumber);
        if (flw == null) {
            // NOTE(review): "Inctive" is a typo for "Inactive" in the service
            // API itself; it must match the FrontLineWorkerService signature.
            flw = frontLineWorkerService.getInctiveByContactNumber(callingNumber);
        }
        State state = getStateForFrontLineWorker(flw, circle);

        if (state != null) {
            if (!serviceDeployedInUserState(service, state)) {
                throw new NotDeployedException(String.format(NOT_DEPLOYED, service));
            }
        } else {
            // If we have no state for the user see if the service is deployed in at least one state in the circle
            if (!serviceDeployedInCircle(service, circle)) {
                throw new NotDeployedException(String.format(NOT_DEPLOYED, service));
            }
        }

        if (MOBILE_ACADEMY.equals(serviceName)) {
            // make sure that flw is authorized to use MA
            restrictAnonymousMAUserCheck(flw, callingNumber, circle);
            restrictInactiveJobUserCheck(flw);
        }

        if (flw != null) {
            Language language = flw.getLanguage();
            if (null != language) {
                user.setLanguageLocationCode(language.getCode());
            }

            serviceUsage = serviceUsageService.getCurrentMonthlyUsageForFLWAndService(flw, service);

            if (!frontLineWorkerAuthorizedForAccess(flw, state)) {
                throw new NotAuthorizedException(String.format(NOT_AUTHORIZED, CALLING_NUMBER));
            }
        }

        ServiceUsageCap serviceUsageCap = serviceUsageCapService.getServiceUsageCap(state, service);

        user.setCurrentUsageInPulses(serviceUsage.getUsageInPulses());
        user.setEndOfUsagePromptCounter(serviceUsage.getEndOfUsage());
        user.setWelcomePromptFlag(serviceUsage.getWelcomePrompt());

        user.setMaxAllowedUsageInPulses(serviceUsageCap.getMaxUsageInPulses());
        // NOTE(review): the max end-of-usage prompt count is hard-coded to 2.
        user.setMaxAllowedEndOfUsagePrompt(2);

        return user;
    }

    // Blocks anonymous (non-MCTS) FLWs from Mobile Academy; writes an audit
    // record before throwing 403. The audit commit relies on
    // noRollbackFor = NotAuthorizedException on the controller method.
    private void restrictAnonymousMAUserCheck(FrontLineWorker flw, Long callingNumber, Circle circle) {
        if (flw == null || flw.getStatus() == FrontLineWorkerStatus.ANONYMOUS ||
                flw.getMctsFlwId() == null || flw.getMctsFlwId().isEmpty()) {
            // New requirement - https://applab.atlassian.net/projects/NMS/issues/NMS-325 - Block anonymous FLWs
            // if flw is null here, we don't already have a record from MCTS. return 403
            // We might have a non-null flw with anonymous status from earlier calls, if so, still return 403 and
            // force them to come through MCTS
            String circleName = circle == null ? null : circle.getName();
            anonymousCallAuditDataService.create(new AnonymousCallAudit(DateUtil.now(), circleName, callingNumber));
            throw new NotAuthorizedException(String.format(NOT_AUTHORIZED, CALLING_NUMBER));
        }
    }

    // Blocks FLWs whose job status is INACTIVE (audited), and also rejects a
    // null FLW outright.
    private void restrictInactiveJobUserCheck(FrontLineWorker flw) {
        if (flw != null && flw.getJobStatus() == FlwJobStatus.INACTIVE) {
            inactiveJobCallAuditDataService.create(new InactiveJobCallAudit(DateUtil.now(), flw.getFlwId(), flw.getMctsFlwId(), flw.getContactNumber()));
            throw new NotAuthorizedException(String.format(NOT_AUTHORIZED, CALLING_NUMBER));
        } else if (flw == null) {
            throw new NotAuthorizedException(String.format(NOT_AUTHORIZED, CALLING_NUMBER));
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.test;

import java.util.*;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import org.junit.Test;

import org.apache.pig.data.*;
import org.apache.pig.impl.util.Spillable;

/**
 * This class will exercise the basic Pig data model and members. It tests for proper behavior in
 * assigment and comparision, as well as function application.
 *
 * @author dnm
 */
public class TestDataBag extends junit.framework.TestCase {

    private Random rand = new Random();

    // Minimal stand-in for Pig's memory manager: lets a test force every
    // registered Spillable to spill to disk on demand.
    private class TestMemoryManager {
        ArrayList<Spillable> mManagedObjects = new ArrayList<Spillable>();

        public void register(Spillable s) {
            mManagedObjects.add(s);
        }

        public void forceSpill() throws IOException {
            Iterator<Spillable> i = mManagedObjects.iterator();
            while (i.hasNext()) i.next().spill();
        }
    }

    // Need to override the regular bag factory so I can register with my local
    // memory manager.
    private class LocalBagFactory {
        TestMemoryManager mMemMgr;

        public LocalBagFactory(TestMemoryManager mgr) {
            mMemMgr = mgr;
        }

        public DataBag newDefaultBag() {
            DataBag bag = new DefaultDataBag();
            mMemMgr.register(bag);
            return bag;
        }

        public DataBag newSortedBag(Comparator<Tuple> comp) {
            DataBag bag = new SortedDataBag(comp);
            mMemMgr.register(bag);
            return bag;
        }

        public DataBag newDistinctBag() {
            DataBag bag = new DistinctDataBag();
            mMemMgr.register(bag);
            return bag;
        }
    }

    // Reset any bag-factory overrides so tests don't leak configuration
    // into each other.
    protected void tearDown() throws Exception {
        BagFactory.resetSelf();
        System.clearProperty("pig.data.bag.factory.name");
        System.clearProperty("pig.data.bag.factory.jar");
    }

    // Test reading and writing default from memory, no spills.
    @Test
    public void testDefaultInMemory() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newDefaultBag();
        ArrayList<Tuple> rightAnswer = new ArrayList<Tuple>(10);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Iterator<Tuple> rIter = rightAnswer.iterator();
        while (rIter.hasNext()) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading and writing default from file with one spill
    @Test
    public void testDefaultSingleSpill() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newDefaultBag();
        ArrayList<Tuple> rightAnswer = new ArrayList<Tuple>(10);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        // Spill after writing: subsequent reads must come from file.
        mgr.forceSpill();

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Iterator<Tuple> rIter = rightAnswer.iterator();
        while (rIter.hasNext()) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading and writing default from file with three spills
    @Test
    public void testDefaultTripleSpill() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newDefaultBag();
        ArrayList<Tuple> rightAnswer = new ArrayList<Tuple>(30);

        // Write tuples into both, spilling after each batch of 10.
        for (int j = 0; j < 3; j++) {
            for (int i = 0; i < 10; i++) {
                Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
                b.add(t);
                rightAnswer.add(t);
            }
            mgr.forceSpill();
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Iterator<Tuple> rIter = rightAnswer.iterator();
        while (rIter.hasNext()) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading with some in file, some in memory.
    @Test
    public void testDefaultInMemInFile() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newDefaultBag();
        ArrayList<Tuple> rightAnswer = new ArrayList<Tuple>(20);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        // First batch spilled to file; second batch stays in memory.
        mgr.forceSpill();

        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Iterator<Tuple> rIter = rightAnswer.iterator();
        while (rIter.hasNext()) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading with a spill happening in the middle of the read.
@Test
    public void testDefaultSpillDuringRead() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newDefaultBag();
        ArrayList<Tuple> rightAnswer = new ArrayList<Tuple>(20);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        mgr.forceSpill();

        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(i));
            b.add(t);
            rightAnswer.add(t);
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Iterator<Tuple> rIter = rightAnswer.iterator();
        // Read part way through, then force a spill mid-iteration; the
        // iterator must survive the switch to reading from file.
        for (int i = 0; i < 15; i++) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }

        mgr.forceSpill();

        while (rIter.hasNext()) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), rIter.next());
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading and writing sorted from memory, no spills.
    @Test
    public void testSortedInMemory() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newSortedBag(null);
        // PriorityQueue yields elements in sorted order, matching the bag.
        PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(10);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt()));
            b.add(t);
            rightAnswer.add(t);
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Tuple t;
        while ((t = rightAnswer.poll()) != null) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), t);
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading and writing default from file with one spill
    @Test
    public void testSortedSingleSpill() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newSortedBag(null);
        PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(10);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt()));
            b.add(t);
            rightAnswer.add(t);
        }

        mgr.forceSpill();

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Tuple t;
        while ((t = rightAnswer.poll()) != null) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), t);
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading and writing default from file with three spills
    @Test
    public void testSortedTripleSpill() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newSortedBag(null);
        PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(30);

        // Write tuples into both, spilling after each batch of 10.
        for (int j = 0; j < 3; j++) {
            for (int i = 0; i < 10; i++) {
                Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt()));
                b.add(t);
                rightAnswer.add(t);
            }
            mgr.forceSpill();
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Tuple t;
        while ((t = rightAnswer.poll()) != null) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), t);
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading with some in file, some in memory.
    @Test
    public void testSortedInMemInFile() throws Exception {
        TestMemoryManager mgr = new TestMemoryManager();
        LocalBagFactory factory = new LocalBagFactory(mgr);
        DataBag b = factory.newSortedBag(null);
        PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(20);

        // Write tuples into both
        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt()));
            b.add(t);
            rightAnswer.add(t);
        }

        mgr.forceSpill();

        for (int i = 0; i < 10; i++) {
            Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt()));
            b.add(t);
            rightAnswer.add(t);
        }

        // Read tuples back, hopefully they come out in the same order.
        Iterator<Tuple> bIter = b.iterator();
        Tuple t;
        while ((t = rightAnswer.poll()) != null) {
            assertTrue("bag ran out of tuples before answer", bIter.hasNext());
            assertEquals("tuples should be the same", bIter.next(), t);
        }
        assertFalse("right answer ran out of tuples before the bag", bIter.hasNext());
    }

    // Test reading with a spill happening in the middle of the read.
@Test public void testSortedSpillDuringRead() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newSortedBag(null); PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(20); // Write tuples into both for (int i = 0; i < 10; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt())); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); for (int i = 0; i < 10; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt())); b.add(t); rightAnswer.add(t); } // Read tuples back, hopefully they come out in the same order. Iterator<Tuple> bIter = b.iterator(); for (int i = 0; i < 15; i++) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rightAnswer.poll()); } mgr.forceSpill(); Tuple t; while ((t = rightAnswer.poll()) != null) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), t); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading with first spill happening in the middle of the read. @Test public void testSortedFirstSpillDuringRead() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newSortedBag(null); PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(20); for (int i = 0; i < 10; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt())); b.add(t); rightAnswer.add(t); } // Read tuples back, hopefully they come out in the same order. 
Iterator<Tuple> bIter = b.iterator(); for (int i = 0; i < 5; i++) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rightAnswer.poll()); } mgr.forceSpill(); Tuple t; while ((t = rightAnswer.poll()) != null) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), t); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading and writing sorted file with so many spills it requires // premerge. @Test public void testSortedPreMerge() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newSortedBag(null); PriorityQueue<Tuple> rightAnswer = new PriorityQueue<Tuple>(30); // Write tuples into both for (int j = 0; j < 373; j++) { for (int i = 0; i < 10; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt())); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); } // Read tuples back, hopefully they come out in the same order. Iterator<Tuple> bIter = b.iterator(); Tuple t; while ((t = rightAnswer.poll()) != null) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), t); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading and writing distinct from memory, no spills. @Test public void testDistinctInMemory() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } // Read tuples back, hopefully they come out in the same order. 
Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading and writing distinct from file with one spill @Test public void testDistinctSingleSpill() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); // Read tuples back, hopefully they come out in the same order. Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading and writing distinct from file with three spills @Test public void testDistinctTripleSpill() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int j = 0; j < 3; j++) { for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); } assertEquals("Size of distinct data bag is incorrect", b.size(), rightAnswer.size()); // Read tuples back, hopefully they come out in the same order. 
Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading with some in file, some in memory. @Test public void testDistinctInMemInFile() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(i)); b.add(t); rightAnswer.add(t); } // Read tuples back, hopefully they come out in the same order. Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading with a spill happening in the middle of the read. 
@Test public void testDistinctSpillDuringRead() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(i)); b.add(t); rightAnswer.add(t); } // Read tuples back, hopefully they come out in the same order. Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); for (int i = 0; i < 5; i++) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } mgr.forceSpill(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test reading and writing distinct from file with enough spills to // force a pre-merge @Test public void testDistinctPreMerge() throws Exception { TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag b = factory.newDistinctBag(); TreeSet<Tuple> rightAnswer = new TreeSet<Tuple>(); // Write tuples into both for (int j = 0; j < 321; j++) { for (int i = 0; i < 50; i++) { Tuple t = TupleFactory.getInstance().newTuple(new Integer(rand.nextInt() % 5)); b.add(t); rightAnswer.add(t); } mgr.forceSpill(); } // Read tuples back, hopefully they come out in the same order. 
Iterator<Tuple> bIter = b.iterator(); Iterator<Tuple> rIter = rightAnswer.iterator(); while (rIter.hasNext()) { assertTrue("bag ran out of tuples before answer", bIter.hasNext()); assertEquals("tuples should be the same", bIter.next(), rIter.next()); } assertFalse("right answer ran out of tuples before the bag", bIter.hasNext()); } // Test the default bag factory. @Test public void testDefaultBagFactory() throws Exception { BagFactory f = BagFactory.getInstance(); DataBag bag = f.newDefaultBag(); DataBag sorted = f.newSortedBag(null); DataBag distinct = f.newDistinctBag(); assertTrue("Expected a default bag", (bag instanceof DefaultDataBag)); assertTrue("Expected a sorted bag", (sorted instanceof SortedDataBag)); assertTrue("Expected a distinct bag", (distinct instanceof DistinctDataBag)); } @Test public void testProvidedBagFactory() throws Exception { // Test bogus factory name. BagFactory.resetSelf(); System.setProperty("pig.data.bag.factory.name", "no such class"); System.setProperty("pig.data.bag.factory.jar", "file:./pig.jar"); boolean caughtIt = false; try { BagFactory f = BagFactory.getInstance(); } catch (RuntimeException re) { assertEquals("Expected Unable to instantiate message", "Unable to instantiate bag factory no such class", re.getMessage()); caughtIt = true; } assertTrue("Expected to catch exception", caughtIt); // Test factory that isn't a BagFactory BagFactory.resetSelf(); System.setProperty("pig.data.bag.factory.name", "org.apache.pig.test.TestDataBag"); System.setProperty("pig.data.bag.factory.jar", "file:./pig.jar"); caughtIt = false; try { BagFactory f = BagFactory.getInstance(); } catch (RuntimeException re) { assertEquals("Expected does not extend BagFactory message", "Provided factory org.apache.pig.test.TestDataBag does not extend BagFactory!", re.getMessage()); caughtIt = true; } assertTrue("Expected to catch exception", caughtIt); // Test that we can instantiate our test factory. 
BagFactory.resetSelf(); System.setProperty("pig.data.bag.factory.name", "org.apache.pig.test.NonDefaultBagFactory"); System.setProperty("pig.data.bag.factory.jar", "file:./pig.jar"); BagFactory f = BagFactory.getInstance(); DataBag b = f.newDefaultBag(); b = f.newSortedBag(null); b = f.newDistinctBag(); BagFactory.resetSelf(); } @Test public void testNonSpillableDataBagEquals1() throws Exception { String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} }; NonSpillableDataBag bg1 = new NonSpillableDataBag(); for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } NonSpillableDataBag bg2 = new NonSpillableDataBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); } @Test public void testNonSpillableDataBagEquals2() throws Exception { String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} }; NonSpillableDataBag bg1 = new NonSpillableDataBag(); for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} }; NonSpillableDataBag bg2 = new NonSpillableDataBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); } @Test public void testDefaultDataBagEquals1() throws Exception { String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} }; TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag bg1 = factory.newDefaultBag(); for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } DataBag bg2 = factory.newDefaultBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); } @Test public void testDefaultDataBagEquals2() throws Exception { String[][] tupleContents = new 
String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} }; TestMemoryManager mgr = new TestMemoryManager(); LocalBagFactory factory = new LocalBagFactory(mgr); DataBag bg1 = factory.newDefaultBag(); for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} }; DataBag bg2 = factory.newDefaultBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); } public void testInternalCachedBag() throws Exception { // check adding empty tuple DataBag bg0 = new InternalCachedBag(); bg0.add(TupleFactory.getInstance().newTuple()); bg0.add(TupleFactory.getInstance().newTuple()); assertEquals(bg0.size(), 2); // check equal of bags DataBag bg1 = new InternalCachedBag(1, 0.5f); assertEquals(bg1.size(), 0); String[][] tupleContents = new String[][] {{"a", "b"},{"c", "d" }, { "e", "f"} }; for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } // check size, and isSorted(), isDistinct() assertEquals(bg1.size(), 3); assertFalse(bg1.isSorted()); assertFalse(bg1.isDistinct()); tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} }; DataBag bg2 = new InternalCachedBag(1, 0.5f); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); // check bag with data written to disk DataBag bg3 = new InternalCachedBag(1, 0.0f); tupleContents = new String[][] {{ "e", "f"}, {"c", "d" }, {"a", "b"}}; for (int i = 0; i < tupleContents.length; i++) { bg3.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg3); // check iterator Iterator<Tuple> iter = bg3.iterator(); DataBag bg4 = new InternalCachedBag(1, 0.0f); while(iter.hasNext()) { bg4.add(iter.next()); } assertEquals(bg3, bg4); // call iterator methods with irregular order iter = bg3.iterator(); assertTrue(iter.hasNext()); assertTrue(iter.hasNext()); DataBag bg5 
= new InternalCachedBag(1, 0.0f); bg5.add(iter.next()); bg5.add(iter.next()); assertTrue(iter.hasNext()); bg5.add(iter.next()); assertFalse(iter.hasNext()); assertFalse(iter.hasNext()); assertEquals(bg3, bg5); bg4.clear(); assertEquals(bg4.size(), 0); } public void testInternalSortedBag() throws Exception { // check adding empty tuple DataBag bg0 = new InternalSortedBag(); bg0.add(TupleFactory.getInstance().newTuple()); bg0.add(TupleFactory.getInstance().newTuple()); assertEquals(bg0.size(), 2); // check equal of bags DataBag bg1 = new InternalSortedBag(); assertEquals(bg1.size(), 0); String[][] tupleContents = new String[][] {{ "e", "f"}, {"a", "b"}, {"c", "d" }}; for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } // check size, and isSorted(), isDistinct() assertEquals(bg1.size(), 3); assertTrue(bg1.isSorted()); assertFalse(bg1.isDistinct()); tupleContents = new String[][] {{"c", "d" }, {"a", "b"},{ "e", "f"} }; DataBag bg2 = new InternalSortedBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); Iterator<Tuple> iter = bg1.iterator(); iter.next().equals(Util.createTuple(new String[] {"a", "b"})); iter.next().equals(Util.createTuple(new String[] {"c", "d"})); iter.next().equals(Util.createTuple(new String[] {"e", "f"})); // check bag with data written to disk DataBag bg3 = new InternalSortedBag(1, 0.0f, null); tupleContents = new String[][] {{ "e", "f"}, {"c", "d" }, {"a", "b"}}; for (int i = 0; i < tupleContents.length; i++) { bg3.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg3); iter = bg3.iterator(); iter.next().equals(Util.createTuple(new String[] {"a", "b"})); iter.next().equals(Util.createTuple(new String[] {"c", "d"})); iter.next().equals(Util.createTuple(new String[] {"e", "f"})); // call iterator methods with irregular order iter = bg3.iterator(); assertTrue(iter.hasNext()); assertTrue(iter.hasNext()); DataBag bg4 = new 
InternalSortedBag(1, 0.0f, null); bg4.add(iter.next()); bg4.add(iter.next()); assertTrue(iter.hasNext()); bg4.add(iter.next()); assertFalse(iter.hasNext()); assertFalse(iter.hasNext()); assertEquals(bg3, bg4); // check clear bg3.clear(); assertEquals(bg3.size(), 0); // test with all data spill out DataBag bg5 = new InternalSortedBag(); for(int j=0; j<3; j++) { for (int i = 0; i < tupleContents.length; i++) { bg5.add(Util.createTuple(tupleContents[i])); } bg5.spill(); } assertEquals(bg5.size(), 9); iter = bg5.iterator(); for(int i=0; i<3; i++) { iter.next().equals(Util.createTuple(new String[] {"a", "b"})); } for(int i=0; i<3; i++) { iter.next().equals(Util.createTuple(new String[] {"c", "d"})); } for(int i=0; i<3; i++) { iter.next().equals(Util.createTuple(new String[] {"e", "f"})); } // test with most data spill out, with some data in memory // and merge of spill files DataBag bg6 = new InternalSortedBag(); for(int j=0; j<104; j++) { for (int i = 0; i < tupleContents.length; i++) { bg6.add(Util.createTuple(tupleContents[i])); } if (j != 103) { bg6.spill(); } } assertEquals(bg6.size(), 104*3); iter = bg6.iterator(); for(int i=0; i<104; i++) { iter.next().equals(Util.createTuple(new String[] {"a", "b"})); } for(int i=0; i<104; i++) { iter.next().equals(Util.createTuple(new String[] {"c", "d"})); } for(int i=0; i<104; i++) { iter.next().equals(Util.createTuple(new String[] {"e", "f"})); } // check two implementation of sorted bag can compare correctly DataBag bg7 = new SortedDataBag(null); for(int j=0; j<104; j++) { for (int i = 0; i < tupleContents.length; i++) { bg7.add(Util.createTuple(tupleContents[i])); } if (j != 103) { bg7.spill(); } } assertEquals(bg6, bg7); } public void testInternalDistinctBag() throws Exception { // check adding empty tuple DataBag bg0 = new InternalDistinctBag(); bg0.add(TupleFactory.getInstance().newTuple()); bg0.add(TupleFactory.getInstance().newTuple()); assertEquals(bg0.size(), 1); // check equal of bags DataBag bg1 = new 
InternalDistinctBag(); assertEquals(bg1.size(), 0); String[][] tupleContents = new String[][] {{ "e", "f"}, {"a", "b"}, {"e", "d" }, {"a", "b"}, {"e", "f"}}; for (int i = 0; i < tupleContents.length; i++) { bg1.add(Util.createTuple(tupleContents[i])); } // check size, and isSorted(), isDistinct() assertEquals(bg1.size(), 3); assertFalse(bg1.isSorted()); assertTrue(bg1.isDistinct()); tupleContents = new String[][] {{"a", "b" }, {"e", "d"}, {"e", "d"}, { "e", "f"} }; DataBag bg2 = new InternalDistinctBag(); for (int i = 0; i < tupleContents.length; i++) { bg2.add(Util.createTuple(tupleContents[i])); } assertEquals(bg1, bg2); Iterator<Tuple> iter = bg1.iterator(); iter.next().equals(Util.createTuple(new String[] {"a", "b"})); iter.next().equals(Util.createTuple(new String[] {"c", "d"})); iter.next().equals(Util.createTuple(new String[] {"e", "f"})); // check bag with data written to disk DataBag bg3 = new InternalDistinctBag(1, 0.0f); tupleContents = new String[][] {{ "e", "f"}, {"a", "b"}, {"e", "d" }, {"a", "b"}, {"e", "f"}}; for (int i = 0; i < tupleContents.length; i++) { bg3.add(Util.createTuple(tupleContents[i])); } assertEquals(bg2, bg3); assertEquals(bg3.size(), 3); // call iterator methods with irregular order iter = bg3.iterator(); assertTrue(iter.hasNext()); assertTrue(iter.hasNext()); DataBag bg4 = new InternalDistinctBag(1, 0.0f); bg4.add(iter.next()); bg4.add(iter.next()); assertTrue(iter.hasNext()); bg4.add(iter.next()); assertFalse(iter.hasNext()); assertFalse(iter.hasNext()); assertEquals(bg3, bg4); // check clear bg3.clear(); assertEquals(bg3.size(), 0); // test with all data spill out DataBag bg5 = new InternalDistinctBag(); for(int j=0; j<3; j++) { for (int i = 0; i < tupleContents.length; i++) { bg5.add(Util.createTuple(tupleContents[i])); } bg5.spill(); } assertEquals(bg5.size(), 3); // test with most data spill out, with some data in memory // and merge of spill files DataBag bg6 = new InternalDistinctBag(); for(int j=0; j<104; j++) { for (int i 
= 0; i < tupleContents.length; i++) { bg6.add(Util.createTuple(tupleContents[i])); } if (j != 103) { bg6.spill(); } } assertEquals(bg6.size(), 3); // check two implementation of sorted bag can compare correctly DataBag bg7 = new DistinctDataBag(); for(int j=0; j<104; j++) { for (int i = 0; i < tupleContents.length; i++) { bg7.add(Util.createTuple(tupleContents[i])); } if (j != 103) { bg7.spill(); } } assertEquals(bg6, bg7); } // See PIG-1231 @Test public void testDataBagIterIdempotent() throws Exception { DataBag bg0 = new DefaultDataBag(); processDataBag(bg0, true); DataBag bg1 = new DistinctDataBag(); processDataBag(bg1, true); DataBag bg2 = new InternalDistinctBag(); processDataBag(bg2, true); DataBag bg3 = new InternalSortedBag(); processDataBag(bg3, true); DataBag bg4 = new SortedDataBag(null); processDataBag(bg4, true); DataBag bg5 = new InternalCachedBag(0, 0); processDataBag(bg5, false); } // See PIG-1285 @Test public void testSerializeSingleTupleBag() throws Exception { Tuple t = Util.createTuple(new String[] {"foo", "bar", "baz"}); DataBag stBag = new SingleTupleBag(t); PipedOutputStream pos = new PipedOutputStream(); DataOutputStream dos = new DataOutputStream(pos); PipedInputStream pis = new PipedInputStream(pos); DataInputStream dis = new DataInputStream(pis); stBag.write(dos); DataBag dfBag = new DefaultDataBag(); dfBag.readFields(dis); assertTrue(dfBag.equals(stBag)); } // See PIG-2550 static class MyCustomTuple extends DefaultTuple { private static final long serialVersionUID = 8156382697467819543L; public MyCustomTuple() { super(); } public MyCustomTuple(Object t) { super(); append(t); } } @Test public void testSpillCustomTuple() throws Exception { DataBag bag = new DefaultDataBag(); Tuple t = new MyCustomTuple(); t.append(1); t.append("hello"); bag.add(t); bag.spill(); Iterator<Tuple> iter = bag.iterator(); Tuple t2 = iter.next(); assertTrue(t2.equals(t)); } void processDataBag(DataBag bg, boolean doSpill) { Tuple t = 
TupleFactory.getInstance().newTuple(new Integer(0)); bg.add(t); if (doSpill) bg.spill(); Iterator<Tuple> iter = bg.iterator(); assertTrue(iter.hasNext()); iter.next(); assertFalse(iter.hasNext()); assertFalse("hasNext should be idempotent", iter.hasNext()); } }
/* Copyright (C) 2012 Intel Corporation. * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For more about this software visit: * http://www.01.org/GraphBuilder */ package com.intel.hadoop.graphbuilder.partition.mapreduce.edge; import java.io.IOException; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.log4j.Logger; import com.intel.hadoop.graphbuilder.graph.GraphOutput; import com.intel.hadoop.graphbuilder.graph.simplegraph.SimpleGraphOutput; import com.intel.hadoop.graphbuilder.parser.FieldParser; import com.intel.hadoop.graphbuilder.parser.GraphParser; import com.intel.hadoop.graphbuilder.partition.mapreduce.keyvalue.IngressKeyType; import com.intel.hadoop.graphbuilder.partition.mapreduce.keyvalue.IngressValueType; /** * The MapRedue class takes from input directory a list of edges and vertices, * and output 2 parts: partitioned graphs and a list of distributed vertex * records. * <p> * Input directory: Can take multiple input directories containing list of * edges. 
Output directory structure: * <ul> * <li>$outputdir/partition{$i}/subpart{$j}/edata for edge data.</li> * <li>Metafile: $outputdir/partition{$i}/subpart{$j} for meta info.</li> * <li>Graph structure: $outputdir/partition{$i}/subpart{$j}/edgelist for * adjacency structure.</li> * <li>VertexRecords: $outputdir/vrecord list of vertex records.</li> * </ul> * </p> * */ public class EdgeIngressMR { private static final Logger LOG = Logger.getLogger(EdgeIngressMR.class); /** MapReduce Job Counters. */ public static enum COUNTER { NUM_VERTICES, NUM_EDGES }; /** * Default constructor, initialize with parsers. * * @param graphparser * @param vidparser * @param vdataparser * @param edataparser */ public EdgeIngressMR(Class graphparser, Class vidparser, Class vdataparser, Class edataparser) { gzip = false; jobName = "Ingress Mapreduce Driver"; setParser(graphparser, vidparser, vdataparser, edataparser); conf = new JobConf(EdgeIngressMR.class); } /** * Set the parser class. * * @param parser */ public void setParser(Class graphparser, Class vidparser, Class vdataparser, Class edataparser) { try { this.graphparser = (GraphParser) graphparser.newInstance(); this.vidparser = (FieldParser) vidparser.newInstance(); this.vdataparser = (FieldParser) vdataparser.newInstance(); this.edataparser = (FieldParser) edataparser.newInstance(); } catch (InstantiationException e) { e.printStackTrace(); LOG.fatal("Parser classes: \n" + graphparser + "\n" + vidparser + "\n" + vdataparser + "\n" + edataparser + " do not exist."); } catch (IllegalAccessException e) { e.printStackTrace(); LOG.fatal("Parser classes: \n" + graphparser + "\n" + vidparser + "\n" + vdataparser + "\n" + edataparser + " do not exist."); } } /** * Set the job name. * * @param name */ public void setJobName(String name) { this.jobName = name; } /** * Set option for using gzip compression in output. * * @param gzip */ public void useGzip(boolean gzip) { this.gzip = gzip; } /** * Set the ingress strategy {random, oblivious}. 
* * @see {ObliviousIngress} * @see {RandomIngress} * @param ingress */ public void setIngress(String ingress) { if (ingress.equals("random") || ingress.equals("greedy")) this.ingress = ingress; else { LOG.error("Unknown ingress method: " + ingress + "\n Supported ingress methods: oblivious, random"); LOG.error("Use the default oblivious ingress"); this.ingress = "greedy"; } } /** * Set the intermediate key value class. * * @param keyClass * @param valClass */ public void setKeyValueClass(Class keyClass, Class valClass) { try { this.mapkeytype = (IngressKeyType) keyClass.newInstance(); this.mapvaltype = (IngressValueType) valClass.newInstance(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } } /** * @return JobConf of the current job. */ public JobConf getConf() { return conf; } /** * @param inputpath * @param outputpath * @param numProcs * @param ingress * @throws IOException */ public void run(String[] inputpaths, String outputpath, int numProcs, String ingress) throws IOException { this.setIngress(ingress); conf.setJobName(jobName); if (this.subpartPerPartition <= 0) this.subpartPerPartition = 8; LOG.info("===== Job: Partition edges and create vertex records ========="); LOG.info("input: " + StringUtils.join(inputpaths, ",")); LOG.info("output: " + outputpath); LOG.info("numProc = " + numProcs); LOG.info("subpartPerPartition = " + subpartPerPartition); LOG.info("keyclass = " + this.mapkeytype.getClass().getName()); LOG.info("valclass = " + this.mapvaltype.getClass().getName()); LOG.debug("graphparser = " + this.graphparser.getClass().getName()); LOG.debug("vidparser = " + this.vidparser.getClass().getName()); LOG.debug("vdataparser = " + this.vdataparser.getClass().getName()); LOG.debug("edataparser = " + this.edataparser.getClass().getName()); LOG.info("ingress = " + this.ingress); LOG.info("gzip = " + Boolean.toString(gzip)); 
LOG.info("==============================================================="); conf.set("ingress", this.ingress); conf.setInt("numProcs", numProcs); conf.set("GraphParser", graphparser.getClass().getName()); conf.set("VidParser", vidparser.getClass().getName()); conf.set("VdataParser", vdataparser.getClass().getName()); conf.set("EdataParser", edataparser.getClass().getName()); conf.setInt("subpartPerPartition", subpartPerPartition); conf.setMapOutputKeyClass(this.mapkeytype.getClass()); conf.setMapOutputValueClass(this.mapvaltype.getClass()); conf.setOutputKeyClass(IntWritable.class); conf.setOutputValueClass(Text.class); conf.setMapperClass(EdgeIngressMapper.class); conf.setCombinerClass(EdgeIngressCombiner.class); conf.setReducerClass(EdgeIngressReducer.class); // GraphOutput output = new GLGraphOutput(numProcs); GraphOutput output = new SimpleGraphOutput(); output.init(conf); conf.setInputFormat(TextInputFormat.class); // conf.setOutputFormat(PartitionedGraphOutputFormat.class); if (gzip) { TextOutputFormat.setCompressOutput(conf, true); TextOutputFormat.setOutputCompressorClass(conf, GzipCodec.class); } for (String path : inputpaths) FileInputFormat.addInputPath(conf, new Path(path)); FileOutputFormat.setOutputPath(conf, new Path(outputpath)); if (!checkTypes()) { LOG.fatal("Type check failed." + "Please check the parsers are consistent with key/val types."); return; } JobClient.runJob(conf); LOG.info("================== Done ====================================\n"); } /** * Ensure the keytype, valuetype are consistent with the parser type. * @return true if type check. 
*/ private boolean checkTypes() { boolean check = true; if (!(mapkeytype.createVid().getClass()).equals(mapvaltype .getGraphTypeFactory().createVid().getClass())) { LOG.fatal("VidType is not consistant between MapKeyType: " + mapkeytype.createVid().getClass().getName() + " and MapValueType: " + mapvaltype.getGraphTypeFactory().createVid().getClass().getName()); check = false; } if (!(vidparser.getType()).equals(mapkeytype.createVid().getClass())) { LOG.fatal("VidType is not consistant between MapKeyType: " + mapkeytype.createVid().getClass().getName() + " and Parser: " + vidparser.getType().getName()); check = false; } if (!(vdataparser.getType().equals(mapvaltype.getGraphTypeFactory() .createVdata().getClass()))) { LOG.fatal("VertexDataType is not consistant between MapValueType: " + mapvaltype.getGraphTypeFactory().createVdata().getClass().getName() + " and Parser: " + vdataparser.getType().getName()); check = false; } if (!(edataparser.getType().equals(mapvaltype.getGraphTypeFactory() .createEdata().getClass()))) { LOG.fatal("EdgeDataType is not consistant between MapValueType: " + mapvaltype.getGraphTypeFactory().createEdata().getClass().getName() + " and Parser: " + edataparser.getType().getName()); check = false; } return check; } /** * Set the number of subpartitions per real partition. * @param n number of subpartitions per real partition. */ public void setTotalSubPartition(int n) { this.subpartPerPartition = n; } private JobConf conf; private GraphParser graphparser; private FieldParser vidparser; private FieldParser vdataparser; private FieldParser edataparser; private boolean gzip; private String jobName; private String ingress; private int subpartPerPartition; private IngressKeyType mapkeytype; private IngressValueType mapvaltype; }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import 
org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.topHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @ESIntegTestCase.SuiteScopeTestCase() public class TopHitsIT extends ESIntegTestCase { private static final String TERMS_AGGS_FIELD = "terms"; private static final String SORT_FIELD = "sort"; @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { @Override protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { return Collections.singletonMap("5", script -> "5"); } } public static String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(ExecutionMode.values()).toString(); } static int numArticles; @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("idx").addMapping("type", TERMS_AGGS_FIELD, "type=keyword", "group", "type=keyword")); createIndex("empty"); assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties") .startObject(TERMS_AGGS_FIELD) .field("type", "keyword") .endObject() .startObject("comments") .field("type", "nested") .startObject("properties") .startObject("user") .field("type", "keyword") .endObject() .startObject("date") .field("type", "long") .endObject() .startObject("message") .field("type", "text") .field("store", true) .field("term_vector", "with_positions_offsets") .field("index_options", "offsets") .endObject() .startObject("reviewers") .field("type", "nested") .startObject("properties") .startObject("name") .field("type", "keyword") .endObject() .endObject() .endObject() .endObject() .endObject() .endObject().endObject().endObject())); ensureGreen("idx", "empty", "articles"); List<IndexRequestBuilder> builders = new ArrayList<>(); for (int i = 0; i < 50; i++) { builders.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource(jsonBuilder() .startObject() .field(TERMS_AGGS_FIELD, "val" + (i / 10)) .field(SORT_FIELD, i + 1) .field("text", "some text to entertain") .field("field1", 5) .endObject())); } builders.add(client().prepareIndex("idx", "field-collapsing", "1").setSource(jsonBuilder() .startObject() .field("group", "a") .field("text", "term x y z b") .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "2").setSource(jsonBuilder() .startObject() .field("group", "a") .field("text", "term x y z n rare") .field("value", 1) .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "3").setSource(jsonBuilder() .startObject() .field("group", "b") .field("text", "x y z term") 
.endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "4").setSource(jsonBuilder() .startObject() .field("group", "b") .field("text", "x y term") .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "5").setSource(jsonBuilder() .startObject() .field("group", "b") .field("text", "x term") .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "6").setSource(jsonBuilder() .startObject() .field("group", "b") .field("text", "term rare") .field("value", 3) .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "7").setSource(jsonBuilder() .startObject() .field("group", "c") .field("text", "x y z term") .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "8").setSource(jsonBuilder() .startObject() .field("group", "c") .field("text", "x y term b") .endObject())); builders.add(client().prepareIndex("idx", "field-collapsing", "9").setSource(jsonBuilder() .startObject() .field("group", "c") .field("text", "rare x term") .field("value", 2) .endObject())); numArticles = scaledRandomIntBetween(10, 100); numArticles -= (numArticles % 5); for (int i = 0; i < numArticles; i++) { XContentBuilder builder = randomFrom(jsonBuilder(), yamlBuilder(), smileBuilder()); builder.startObject().field("date", i).startArray("comments"); for (int j = 0; j < i; j++) { String user = Integer.toString(j); builder.startObject().field("id", j).field("user", user).field("message", "some text").endObject(); } builder.endArray().endObject(); builders.add( client().prepareIndex("articles", "article").setSource(builder) ); } builders.add( client().prepareIndex("articles", "article", "1") .setSource(jsonBuilder().startObject().field("title", "title 1").field("body", "some text").startArray("comments") .startObject() .field("user", "a").field("date", 1L).field("message", "some comment") .startArray("reviewers") .startObject().field("name", "user a").endObject() .startObject().field("name", 
"user b").endObject() .startObject().field("name", "user c").endObject() .endArray() .endObject() .startObject() .field("user", "b").field("date", 2L).field("message", "some other comment") .startArray("reviewers") .startObject().field("name", "user c").endObject() .startObject().field("name", "user d").endObject() .startObject().field("name", "user e").endObject() .endArray() .endObject() .endArray().endObject()) ); builders.add( client().prepareIndex("articles", "article", "2") .setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text").startArray("comments") .startObject() .field("user", "b").field("date", 3L).field("message", "some comment") .startArray("reviewers") .startObject().field("name", "user f").endObject() .endArray() .endObject() .startObject().field("user", "c").field("date", 4L).field("message", "some other comment").endObject() .endArray().endObject()) ); indexRandom(true, builders); ensureSearchable(); } private String key(Terms.Bucket bucket) { return bucket.getKeyAsString(); } public void testBasics() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) ) ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); long higestSortValue = 0; for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); 
assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue)); assertThat((Long) hits.getAt(1).sortValues()[0], equalTo(higestSortValue - 1)); assertThat((Long) hits.getAt(2).sortValues()[0], equalTo(higestSortValue - 2)); assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4)); } } public void testIssue11119() throws Exception { // Test that top_hits aggregation is fed scores if query results size=0 SearchResponse response = client() .prepareSearch("idx") .setTypes("field-collapsing") .setSize(0) .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(8L)); assertThat(response.getHits().hits().length, equalTo(0)); assertThat(response.getHits().maxScore(), equalTo(0f)); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(3)); for (Terms.Bucket bucket : terms.getBuckets()) { assertThat(bucket, notNullValue()); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); float bestScore = Float.MAX_VALUE; for (int h = 0; h < hits.getHits().length; h++) { float score=hits.getAt(h).getScore(); assertThat(score, lessThanOrEqualTo(bestScore)); assertThat(score, greaterThan(0f)); bestScore = hits.getAt(h).getScore(); } } // Also check that min_score setting works when size=0 // (technically not a test of top_hits but implementation details are // tied up with the need to feed scores into the agg tree even when // users don't want ranked set of query results.) 
response = client() .prepareSearch("idx") .setTypes("field-collapsing") .setSize(0) .setMinScore(0.0001f) .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(8L)); assertThat(response.getHits().hits().length, equalTo(0)); assertThat(response.getHits().maxScore(), equalTo(0f)); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(3)); } public void testBreadthFirstWithScoreNeeded() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .collectMode(SubAggCollectionMode.BREADTH_FIRST) .field(TERMS_AGGS_FIELD) .subAggregation(topHits("hits").size(3)) ).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4)); } } public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .collectMode(SubAggCollectionMode.BREADTH_FIRST) .field(TERMS_AGGS_FIELD) .order(Terms.Order.aggregation("max", false)) 
.subAggregation(max("max").field(SORT_FIELD)) .subAggregation(topHits("hits").size(3)) ).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); int id = 4; for (Terms.Bucket bucket : terms.getBuckets()) { assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + id)); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4)); id --; } } public void testBasicsGetProperty() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(topHits("hits"))).execute().actionGet(); assertSearchResponse(searchResponse); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); assertThat(global.getName(), equalTo("global")); assertThat(global.getAggregations(), notNullValue()); assertThat(global.getAggregations().asMap().size(), equalTo(1)); TopHits topHits = global.getAggregations().get("hits"); assertThat(topHits, notNullValue()); assertThat(topHits.getName(), equalTo("hits")); assertThat((TopHits) global.getProperty("hits"), sameInstance(topHits)); } public void testPagination() throws Exception { int size = randomIntBetween(1, 10); int from = randomIntBetween(0, 10); SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) .from(from) .size(size) ) ) .get(); assertSearchResponse(response); SearchResponse control = 
client().prepareSearch("idx") .setTypes("type") .setFrom(from) .setSize(size) .setPostFilter(QueryBuilders.termQuery(TERMS_AGGS_FIELD, "val0")) .addSort(SORT_FIELD, SortOrder.DESC) .get(); assertSearchResponse(control); SearchHits controlHits = control.getHits(); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); Terms.Bucket bucket = terms.getBucketByKey("val0"); assertThat(bucket, notNullValue()); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(controlHits.totalHits())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info("{}: top_hits: [{}][{}] control: [{}][{}]", i, hits.getAt(i).id(), hits.getAt(i).sortValues()[0], controlHits.getAt(i).id(), controlHits.getAt(i).sortValues()[0]); assertThat(hits.getAt(i).id(), equalTo(controlHits.getAt(i).id())); assertThat(hits.getAt(i).sortValues()[0], equalTo(controlHits.getAt(i).sortValues()[0])); } } public void testSortByBucket() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .order(Terms.Order.aggregation("max_sort", false)) .subAggregation( topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true) ) .subAggregation( max("max_sort").field(SORT_FIELD) ) ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); long higestSortValue = 50; int currentBucket = 4; for (Terms.Bucket bucket : terms.getBuckets()) { assertThat(key(bucket), 
equalTo("val" + currentBucket--)); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue)); assertThat((Long) hits.getAt(1).sortValues()[0], equalTo(higestSortValue - 1)); assertThat((Long) hits.getAt(2).sortValues()[0], equalTo(higestSortValue - 2)); Max max = bucket.getAggregations().get("max_sort"); assertThat(max.getValue(), equalTo(((Long) higestSortValue).doubleValue())); higestSortValue -= 10; } } public void testFieldCollapsing() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("field-collapsing") .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(matchQuery("text", "term rare")) .addAggregation( terms("terms").executionHint(randomExecutionHint()).field("group") .order(Terms.Order.aggregation("max_score", false)).subAggregation(topHits("hits").size(1)) .subAggregation(max("max_score").field("value"))).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(3)); Iterator<Terms.Bucket> bucketIterator = terms.getBuckets().iterator(); Terms.Bucket bucket = bucketIterator.next(); assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).id(), equalTo("6")); bucket = bucketIterator.next(); assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).id(), 
equalTo("9")); bucket = bucketIterator.next(); assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).id(), equalTo("2")); } public void testFetchFeatures() { SearchResponse response = client().prepareSearch("idx").setTypes("type") .setQuery(matchQuery("text", "text").queryName("test")) .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").size(1) .highlighter(new HighlightBuilder().field("text")) .explain(true) .storedField("text") .fieldDataField("field1") .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) .fetchSource("text", null) .version(true) ) ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); HighlightField highlightField = hit.getHighlightFields().get("text"); assertThat(highlightField.getFragments().length, equalTo(1)); assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em> to entertain")); Explanation explanation = hit.explanation(); assertThat(explanation.toString(), containsString("text:text")); long version = hit.version(); assertThat(version, equalTo(1L)); assertThat(hit.matchedQueries()[0], equalTo("test")); SearchHitField field = hit.field("field1"); assertThat(field.getValue().toString(), equalTo("5")); assertThat(hit.getSource().get("text").toString(), equalTo("some text to 
entertain")); field = hit.field("script"); assertThat(field.getValue().toString(), equalTo("5")); assertThat(hit.sourceAsMap().size(), equalTo(1)); assertThat(hit.sourceAsMap().get("text").toString(), equalTo("some text to entertain")); } } public void testInvalidSortField() throws Exception { try { client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC)) ) ).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("No mapping found for [xyz] in order to sort on")); } } public void testEmptyIndex() throws Exception { SearchResponse response = client().prepareSearch("empty").setTypes("type") .addAggregation(topHits("hits")) .get(); assertSearchResponse(response); TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); assertThat(hits.getHits().totalHits(), equalTo(0L)); } public void testTrackScores() throws Exception { boolean[] trackScores = new boolean[]{true, false}; for (boolean trackScore : trackScores) { logger.info("Track score={}", trackScore); SearchResponse response = client().prepareSearch("idx").setTypes("field-collapsing") .setQuery(matchQuery("text", "term rare")) .addAggregation(terms("terms") .field("group") .subAggregation( topHits("hits") .trackScores(trackScore) .size(1) .sort("_uid", SortOrder.DESC) ) ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(3)); Terms.Bucket bucket = terms.getBucketByKey("a"); assertThat(key(bucket), equalTo("a")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.getMaxScore(), trackScore ? 
not(equalTo(Float.NaN)) : equalTo(Float.NaN)); assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN)); bucket = terms.getBucketByKey("b"); assertThat(key(bucket), equalTo("b")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); assertThat(hits.getMaxScore(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN)); assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN)); bucket = terms.getBucketByKey("c"); assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); assertThat(hits.getMaxScore(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN)); assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN)); } } public void testTopHitsInNestedSimple() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") .setQuery(matchQuery("title", "title")) .addAggregation( nested("to-comments", "comments") .subAggregation( terms("users") .field("comments.user") .subAggregation( topHits("top-comments").sort("comments.date", SortOrder.ASC) ) ) ) .get(); Nested nested = searchResponse.getAggregations().get("to-comments"); assertThat(nested.getDocCount(), equalTo(4L)); Terms terms = nested.getAggregations().get("users"); Terms.Bucket bucket = terms.getBucketByKey("a"); assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); assertThat(searchHits.totalHits(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(1)); bucket = terms.getBucketByKey("b"); assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits 
= topHits.getHits(); assertThat(searchHits.totalHits(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(2)); assertThat(searchHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(1).getNestedIdentity().getOffset(), equalTo(0)); assertThat((Integer) searchHits.getAt(1).getSource().get("date"), equalTo(3)); bucket = terms.getBucketByKey("c"); assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); assertThat(searchHits.totalHits(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(4)); } public void testTopHitsInSecondLayerNested() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") .setQuery(matchQuery("title", "title")) .addAggregation( nested("to-comments", "comments") .subAggregation( nested("to-reviewers", "comments.reviewers").subAggregation( // Also need to sort on _doc because there are two reviewers with the same name topHits("top-reviewers").sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) ) ) .subAggregation(topHits("top-comments").sort("comments.date", SortOrder.DESC).size(4)) ).get(); assertNoFailures(searchResponse); Nested toComments = searchResponse.getAggregations().get("to-comments"); assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); assertThat(topComments.getHits().totalHits(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); 
assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); assertThat(topComments.getHits().getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topComments.getHits().getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(topComments.getHits().getAt(0).getNestedIdentity().getChild(), nullValue()); assertThat(topComments.getHits().getAt(1).getId(), equalTo("2")); assertThat(topComments.getHits().getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topComments.getHits().getAt(1).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topComments.getHits().getAt(1).getNestedIdentity().getChild(), nullValue()); assertThat(topComments.getHits().getAt(2).getId(), equalTo("1")); assertThat(topComments.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topComments.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(1)); assertThat(topComments.getHits().getAt(2).getNestedIdentity().getChild(), nullValue()); assertThat(topComments.getHits().getAt(3).getId(), equalTo("1")); assertThat(topComments.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topComments.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topComments.getHits().getAt(3).getNestedIdentity().getChild(), nullValue()); Nested toReviewers = toComments.getAggregations().get("to-reviewers"); assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); assertThat(topReviewers.getHits().totalHits(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(0).sourceAsMap().get("name"), equalTo("user a")); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); 
assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(1).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(1).sourceAsMap().get("name"), equalTo("user b")); assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getChild().getOffset(), equalTo(1)); assertThat(topReviewers.getHits().getAt(2).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(2).sourceAsMap().get("name"), equalTo("user c")); assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(2)); assertThat(topReviewers.getHits().getAt(3).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(3).sourceAsMap().get("name"), equalTo("user c")); assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(1)); assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); 
assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(4).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(4).sourceAsMap().get("name"), equalTo("user d")); assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getOffset(), equalTo(1)); assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getChild().getOffset(), equalTo(1)); assertThat(topReviewers.getHits().getAt(5).getId(), equalTo("1")); assertThat((String) topReviewers.getHits().getAt(5).sourceAsMap().get("name"), equalTo("user e")); assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getOffset(), equalTo(1)); assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getChild().getOffset(), equalTo(2)); assertThat(topReviewers.getHits().getAt(6).getId(), equalTo("2")); assertThat((String) topReviewers.getHits().getAt(6).sourceAsMap().get("name"), equalTo("user f")); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("reviewers")); assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); } public void testNestedFetchFeatures() { String hlType = randomFrom("plain", "fvh", "postings"); HighlightBuilder.Field hlField = new 
HighlightBuilder.Field("comments.message") .highlightQuery(matchQuery("comments.message", "comment")) .forceSource(randomBoolean()) // randomly from stored field or _source .highlighterType(hlType); SearchResponse searchResponse = client() .prepareSearch("articles") .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"), ScoreMode.Avg)) .addAggregation( nested("to-comments", "comments").subAggregation( topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true) .fieldDataField("comments.user") .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())).fetchSource("comments.message", null) .version(true).sort("comments.date", SortOrder.ASC))).get(); assertHitCount(searchResponse, 2); Nested nested = searchResponse.getAggregations().get("to-comments"); assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); assertThat(hits.totalHits(), equalTo(4L)); SearchHit searchHit = hits.getAt(0); assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHit.getNestedIdentity().getOffset(), equalTo(0)); HighlightField highlightField = searchHit.getHighlightFields().get("comments.message"); assertThat(highlightField.getFragments().length, equalTo(1)); assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>comment</em>")); // Can't explain nested hit with the main query, since both are in a different scopes, also the nested doc may not even have matched with the main query // If top_hits would have a query option then we can explain that query Explanation explanation = searchHit.explanation(); assertFalse(explanation.isMatch()); // Returns the version of the root document. 
Nested docs don't have a separate version long version = searchHit.version(); assertThat(version, equalTo(1L)); assertThat(searchHit.matchedQueries(), arrayContaining("test")); SearchHitField field = searchHit.field("comments.user"); assertThat(field.getValue().toString(), equalTo("a")); field = searchHit.field("script"); assertThat(field.getValue().toString(), equalTo("5")); assertThat(searchHit.sourceAsMap().size(), equalTo(1)); assertThat(XContentMapValues.extractValue("comments.message", searchHit.sourceAsMap()), equalTo("some comment")); } public void testTopHitsInNested() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") .addAggregation( histogram("dates") .field("date") .interval(5) .order(Histogram.Order.aggregation("to-comments", true)) .subAggregation( nested("to-comments", "comments") .subAggregation(topHits("comments") .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text")))) .sort("comments.id", SortOrder.ASC)) ) ) .get(); Histogram histogram = searchResponse.getAggregations().get("dates"); for (int i = 0; i < numArticles; i += 5) { Histogram.Bucket bucket = histogram.getBuckets().get(i / 5); assertThat(bucket.getDocCount(), equalTo(5L)); long numNestedDocs = 10 + (5 * i); Nested nested = bucket.getAggregations().get("to-comments"); assertThat(nested.getDocCount(), equalTo(numNestedDocs)); TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); assertThat(searchHits.totalHits(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); assertThat((Integer) searchHits.getAt(j).sourceAsMap().get("id"), equalTo(0)); HighlightField highlightField = searchHits.getAt(j).getHighlightFields().get("comments.message"); 
assertThat(highlightField.getFragments().length, equalTo(1)); assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em>")); } } } public void testDontExplode() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) ) ) .get(); assertNoFailures(response); } public void testNoStoredFields() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").storedField("_none_") ) ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); assertThat(hits.totalHits(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.source(), nullValue()); assertThat(hit.id(), nullValue()); assertThat(hit.type(), nullValue()); } } } /** * Make sure that a request using a script does not get cached and a request * not using a script does get cached. 
*/ public void testDontCacheScripts() throws Exception { assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get()); indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2)); // Make sure we are starting with a clear cache assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getHitCount(), equalTo(0L)); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(0L)); // Test that a request using a script field does not get cached SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation(topHits("foo").scriptField("bar", new Script("5", ScriptType.INLINE, CustomScriptPlugin.NAME, null))).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getHitCount(), equalTo(0L)); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(0L)); // Test that a request using a script sort does not get cached r = client().prepareSearch("cache_test_idx").setSize(0) .addAggregation(topHits("foo").sort( SortBuilders.scriptSort(new Script("5", ScriptType.INLINE, CustomScriptPlugin.NAME, null), ScriptSortType.STRING))) .get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getHitCount(), equalTo(0L)); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), 
equalTo(0L)); // To make sure that the cache is working test that a request not using // a script is cached r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(topHits("foo")).get(); assertSearchResponse(r); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getHitCount(), equalTo(0L)); assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } public void testWithRescore() { // Rescore with default sort on relevancy (score) { SearchResponse response = client() .prepareSearch("idx") .addRescorer( RescoreBuilder.queryRescorer(new MatchAllQueryBuilder().boost(3.0f)) ) .setTypes("type") .addAggregation(terms("terms") .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits") ) ) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); for (SearchHit hit : topHits.getHits().getHits()) { assertThat(hit.score(), equalTo(4.0f)); } } } { SearchResponse response = client() .prepareSearch("idx") .addRescorer( RescoreBuilder.queryRescorer(new MatchAllQueryBuilder().boost(3.0f)) ) .setTypes("type") .addAggregation(terms("terms") .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").sort(SortBuilders.scoreSort()) ) ) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); for (SearchHit hit : topHits.getHits().getHits()) { assertThat(hit.score(), equalTo(4.0f)); } } } // Rescore should not be applied if the sort order is not relevancy { SearchResponse response = client() .prepareSearch("idx") .addRescorer( RescoreBuilder.queryRescorer(new MatchAllQueryBuilder().boost(3.0f)) ) .setTypes("type") .addAggregation(terms("terms") .field(TERMS_AGGS_FIELD) .subAggregation( 
topHits("hits").sort(SortBuilders.fieldSort("_type")) ) ) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); for (SearchHit hit : topHits.getHits().getHits()) { assertThat(hit.score(), equalTo(Float.NaN)); } } } { SearchResponse response = client() .prepareSearch("idx") .addRescorer( RescoreBuilder.queryRescorer(new MatchAllQueryBuilder().boost(3.0f)) ) .setTypes("type") .addAggregation(terms("terms") .field(TERMS_AGGS_FIELD) .subAggregation( topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_type")) ) ) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); for (SearchHit hit : topHits.getHits().getHits()) { assertThat(hit.score(), equalTo(Float.NaN)); } } } } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto package com.google.cloud.compute.v1; /** * * * <pre> * A request message for RegionAutoscalers.Insert. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.InsertRegionAutoscalerRequest} */ public final class InsertRegionAutoscalerRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InsertRegionAutoscalerRequest) InsertRegionAutoscalerRequestOrBuilder { private static final long serialVersionUID = 0L; // Use InsertRegionAutoscalerRequest.newBuilder() to construct. 
private InsertRegionAutoscalerRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InsertRegionAutoscalerRequest() { project_ = ""; region_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InsertRegionAutoscalerRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private InsertRegionAutoscalerRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 296879706: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; requestId_ = s; break; } case 1111570338: { java.lang.String s = input.readStringRequireUtf8(); region_ = s; break; } case 1660928946: { com.google.cloud.compute.v1.Autoscaler.Builder subBuilder = null; if (autoscalerResource_ != null) { subBuilder = autoscalerResource_.toBuilder(); } autoscalerResource_ = input.readMessage( com.google.cloud.compute.v1.Autoscaler.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(autoscalerResource_); autoscalerResource_ = subBuilder.buildPartial(); } break; } case 1820481738: { java.lang.String s = input.readStringRequireUtf8(); project_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { 
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InsertRegionAutoscalerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InsertRegionAutoscalerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.class, com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.Builder.class); } private int bitField0_; public static final int AUTOSCALER_RESOURCE_FIELD_NUMBER = 207616118; private com.google.cloud.compute.v1.Autoscaler autoscalerResource_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the autoscalerResource field is set. */ @java.lang.Override public boolean hasAutoscalerResource() { return autoscalerResource_ != null; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The autoscalerResource. */ @java.lang.Override public com.google.cloud.compute.v1.Autoscaler getAutoscalerResource() { return autoscalerResource_ == null ? 
com.google.cloud.compute.v1.Autoscaler.getDefaultInstance() : autoscalerResource_; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.AutoscalerOrBuilder getAutoscalerResourceOrBuilder() { return getAutoscalerResource(); } public static final int PROJECT_FIELD_NUMBER = 227560217; private volatile java.lang.Object project_; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. */ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_FIELD_NUMBER = 138946292; private volatile java.lang.Object region_; /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The region. 
*/ @java.lang.Override public java.lang.String getRegion() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The bytes for region. */ @java.lang.Override public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 37109963; private volatile java.lang.Object requestId_; /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return Whether the requestId field is set. */ @java.lang.Override public boolean hasRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * An optional request ID to identify requests. 
Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The bytes for requestId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_); } if (autoscalerResource_ != null) { output.writeMessage(207616118, getAutoscalerResource()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_); } if (autoscalerResource_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 207616118, getAutoscalerResource()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.InsertRegionAutoscalerRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.InsertRegionAutoscalerRequest other = (com.google.cloud.compute.v1.InsertRegionAutoscalerRequest) obj; if (hasAutoscalerResource() != other.hasAutoscalerResource()) return false; if (hasAutoscalerResource()) { if (!getAutoscalerResource().equals(other.getAutoscalerResource())) return false; } if (!getProject().equals(other.getProject())) return false; if (!getRegion().equals(other.getRegion())) return false; if (hasRequestId() != other.hasRequestId()) return false; if (hasRequestId()) { if (!getRequestId().equals(other.getRequestId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAutoscalerResource()) { hash = (37 * hash) + AUTOSCALER_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getAutoscalerResource().hashCode(); } hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); if (hasRequestId()) { hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.InsertRegionAutoscalerRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for RegionAutoscalers.Insert. See the method description for details. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.InsertRegionAutoscalerRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InsertRegionAutoscalerRequest) com.google.cloud.compute.v1.InsertRegionAutoscalerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InsertRegionAutoscalerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InsertRegionAutoscalerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.class, com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if (autoscalerResourceBuilder_ == null) { autoscalerResource_ = null; } else { autoscalerResource_ = null; autoscalerResourceBuilder_ = null; } project_ = ""; region_ = ""; requestId_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InsertRegionAutoscalerRequest_descriptor; } @java.lang.Override public 
com.google.cloud.compute.v1.InsertRegionAutoscalerRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.InsertRegionAutoscalerRequest build() { com.google.cloud.compute.v1.InsertRegionAutoscalerRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.InsertRegionAutoscalerRequest buildPartial() { com.google.cloud.compute.v1.InsertRegionAutoscalerRequest result = new com.google.cloud.compute.v1.InsertRegionAutoscalerRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (autoscalerResourceBuilder_ == null) { result.autoscalerResource_ = autoscalerResource_; } else { result.autoscalerResource_ = autoscalerResourceBuilder_.build(); } result.project_ = project_; result.region_ = region_; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.requestId_ = requestId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.InsertRegionAutoscalerRequest) { return mergeFrom((com.google.cloud.compute.v1.InsertRegionAutoscalerRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.InsertRegionAutoscalerRequest other) { if (other == com.google.cloud.compute.v1.InsertRegionAutoscalerRequest.getDefaultInstance()) return this; if (other.hasAutoscalerResource()) { mergeAutoscalerResource(other.getAutoscalerResource()); } if (!other.getProject().isEmpty()) { project_ = other.project_; onChanged(); } if (!other.getRegion().isEmpty()) { region_ = other.region_; onChanged(); } if (other.hasRequestId()) { bitField0_ |= 0x00000001; requestId_ = other.requestId_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.compute.v1.InsertRegionAutoscalerRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.compute.v1.InsertRegionAutoscalerRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private com.google.cloud.compute.v1.Autoscaler autoscalerResource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Autoscaler, com.google.cloud.compute.v1.Autoscaler.Builder, com.google.cloud.compute.v1.AutoscalerOrBuilder> autoscalerResourceBuilder_; /** * * * <pre> * The body resource for 
this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the autoscalerResource field is set. */ public boolean hasAutoscalerResource() { return autoscalerResourceBuilder_ != null || autoscalerResource_ != null; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The autoscalerResource. */ public com.google.cloud.compute.v1.Autoscaler getAutoscalerResource() { if (autoscalerResourceBuilder_ == null) { return autoscalerResource_ == null ? com.google.cloud.compute.v1.Autoscaler.getDefaultInstance() : autoscalerResource_; } else { return autoscalerResourceBuilder_.getMessage(); } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAutoscalerResource(com.google.cloud.compute.v1.Autoscaler value) { if (autoscalerResourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } autoscalerResource_ = value; onChanged(); } else { autoscalerResourceBuilder_.setMessage(value); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAutoscalerResource( com.google.cloud.compute.v1.Autoscaler.Builder builderForValue) { if (autoscalerResourceBuilder_ == null) { autoscalerResource_ = builderForValue.build(); onChanged(); } else { autoscalerResourceBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 
[(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeAutoscalerResource(com.google.cloud.compute.v1.Autoscaler value) { if (autoscalerResourceBuilder_ == null) { if (autoscalerResource_ != null) { autoscalerResource_ = com.google.cloud.compute.v1.Autoscaler.newBuilder(autoscalerResource_) .mergeFrom(value) .buildPartial(); } else { autoscalerResource_ = value; } onChanged(); } else { autoscalerResourceBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearAutoscalerResource() { if (autoscalerResourceBuilder_ == null) { autoscalerResource_ = null; onChanged(); } else { autoscalerResource_ = null; autoscalerResourceBuilder_ = null; } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.Autoscaler.Builder getAutoscalerResourceBuilder() { onChanged(); return getAutoscalerResourceFieldBuilder().getBuilder(); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.AutoscalerOrBuilder getAutoscalerResourceOrBuilder() { if (autoscalerResourceBuilder_ != null) { return autoscalerResourceBuilder_.getMessageOrBuilder(); } else { return autoscalerResource_ == null ? 
com.google.cloud.compute.v1.Autoscaler.getDefaultInstance() : autoscalerResource_; } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.Autoscaler autoscaler_resource = 207616118 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Autoscaler, com.google.cloud.compute.v1.Autoscaler.Builder, com.google.cloud.compute.v1.AutoscalerOrBuilder> getAutoscalerResourceFieldBuilder() { if (autoscalerResourceBuilder_ == null) { autoscalerResourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Autoscaler, com.google.cloud.compute.v1.Autoscaler.Builder, com.google.cloud.compute.v1.AutoscalerOrBuilder>( getAutoscalerResource(), getParentForChildren(), isClean()); autoscalerResource_ = null; } return autoscalerResourceBuilder_; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. 
*/ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; onChanged(); return this; } private java.lang.Object region_ = ""; /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The region. 
*/ public java.lang.String getRegion() { java.lang.Object ref = region_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The bytes for region. */ public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @param value The region to set. * @return This builder for chaining. */ public Builder setRegion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); return this; } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return This builder for chaining. */ public Builder clearRegion() { region_ = getDefaultInstance().getRegion(); onChanged(); return this; } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @param value The bytes for region to set. * @return This builder for chaining. 
*/ public Builder setRegionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); region_ = value; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return Whether the requestId field is set. */ public boolean hasRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The requestId. 
*/ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; requestId_ = value; onChanged(); return this; } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return This builder for chaining. */ public Builder clearRequestId() { bitField0_ = (bitField0_ & ~0x00000001); requestId_ = getDefaultInstance().getRequestId(); onChanged(); return this; } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bitField0_ |= 0x00000001; requestId_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InsertRegionAutoscalerRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InsertRegionAutoscalerRequest) private static final com.google.cloud.compute.v1.InsertRegionAutoscalerRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InsertRegionAutoscalerRequest(); } public static com.google.cloud.compute.v1.InsertRegionAutoscalerRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InsertRegionAutoscalerRequest> PARSER = new com.google.protobuf.AbstractParser<InsertRegionAutoscalerRequest>() { @java.lang.Override public InsertRegionAutoscalerRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new InsertRegionAutoscalerRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<InsertRegionAutoscalerRequest> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<InsertRegionAutoscalerRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.InsertRegionAutoscalerRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.fasterxml.jackson.datatype.jdk8; import java.util.*; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; import com.fasterxml.jackson.annotation.JsonTypeInfo.As; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; public class OptionalBasicTest extends ModuleTestBase { public static final class OptionalData { public Optional<String> myString; } @JsonAutoDetect(fieldVisibility = Visibility.ANY) public static final class OptionalGenericData<T> { private Optional<T> myData; } @JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class) public static class Unit { public Optional<Unit> baseUnit; public Unit() { } public Unit(final Optional<Unit> u) { baseUnit = u; } public void link(final Unit u) { baseUnit = Optional.of(u); } } // To test handling of polymorphic value types public static class Container { public Optional<Contained> contained; } @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = As.PROPERTY) @JsonSubTypes({ @JsonSubTypes.Type(name = "ContainedImpl", value = ContainedImpl.class), }) public static interface Contained { } public static class ContainedImpl implements Contained { } /* /********************************************************** /* Test methods /********************************************************** */ private final ObjectMapper MAPPER = mapperWithModule(); public void testOptionalTypeResolution() throws Exception { // With 2.6, we need to recognize it as ReferenceType JavaType t = MAPPER.constructType(Optional.class); assertNotNull(t); assertEquals(Optional.class, t.getRawClass()); assertTrue(t.isReferenceType()); } public void testDeserAbsent() throws Exception { Optional<?> value = MAPPER.readValue("null", new TypeReference<Optional<String>>() { }); assertFalse(value.isPresent()); } public void testDeserSimpleString() throws Exception { Optional<?> 
value = MAPPER.readValue("\"simpleString\"", new TypeReference<Optional<String>>() { }); assertTrue(value.isPresent()); assertEquals("simpleString", value.get()); } public void testDeserInsideObject() throws Exception { OptionalData data = MAPPER.readValue("{\"myString\":\"simpleString\"}", OptionalData.class); assertTrue(data.myString.isPresent()); assertEquals("simpleString", data.myString.get()); } public void testDeserComplexObject() throws Exception { TypeReference<Optional<OptionalData>> type = new TypeReference<Optional<OptionalData>>() { }; Optional<OptionalData> data = MAPPER.readValue( "{\"myString\":\"simpleString\"}", type); assertTrue(data.isPresent()); assertTrue(data.get().myString.isPresent()); assertEquals("simpleString", data.get().myString.get()); } public void testDeserGeneric() throws Exception { TypeReference<Optional<OptionalGenericData<String>>> type = new TypeReference<Optional<OptionalGenericData<String>>>() { }; Optional<OptionalGenericData<String>> data = MAPPER.readValue( "{\"myData\":\"simpleString\"}", type); assertTrue(data.isPresent()); assertTrue(data.get().myData.isPresent()); assertEquals("simpleString", data.get().myData.get()); } public void testSerAbsent() throws Exception { String value = MAPPER.writeValueAsString(Optional.empty()); assertEquals("null", value); } public void testSerSimpleString() throws Exception { String value = MAPPER.writeValueAsString(Optional.of("simpleString")); assertEquals("\"simpleString\"", value); } public void testSerInsideObject() throws Exception { OptionalData data = new OptionalData(); data.myString = Optional.of("simpleString"); String value = MAPPER.writeValueAsString(data); assertEquals("{\"myString\":\"simpleString\"}", value); } public void testSerComplexObject() throws Exception { OptionalData data = new OptionalData(); data.myString = Optional.of("simpleString"); String value = MAPPER.writeValueAsString(Optional.of(data)); assertEquals("{\"myString\":\"simpleString\"}", value); } public 
void testSerGeneric() throws Exception { OptionalGenericData<String> data = new OptionalGenericData<String>(); data.myData = Optional.of("simpleString"); String value = MAPPER.writeValueAsString(Optional.of(data)); assertEquals("{\"myData\":\"simpleString\"}", value); } public void testSerNonNull() throws Exception { OptionalData data = new OptionalData(); data.myString = Optional.empty(); // NOTE: pass 'true' to ensure "legacy" setting String value = mapperWithModule(true).setSerializationInclusion( JsonInclude.Include.NON_NULL).writeValueAsString(data); assertEquals("{}", value); } public void testSerOptDefault() throws Exception { OptionalData data = new OptionalData(); data.myString = Optional.empty(); String value = mapperWithModule().setSerializationInclusion( JsonInclude.Include.ALWAYS).writeValueAsString(data); assertEquals("{\"myString\":null}", value); } public void testSerOptNull() throws Exception { OptionalData data = new OptionalData(); data.myString = null; String value = mapperWithModule().setSerializationInclusion( JsonInclude.Include.NON_NULL).writeValueAsString(data); assertEquals("{}", value); } public void testSerOptDisableAsNull() throws Exception { final OptionalData data = new OptionalData(); data.myString = Optional.empty(); Jdk8Module mod = new Jdk8Module().configureAbsentsAsNulls(false); ObjectMapper mapper = new ObjectMapper().registerModule(mod) .setSerializationInclusion(JsonInclude.Include.NON_NULL); assertEquals("{\"myString\":null}", mapper.writeValueAsString(data)); // but do exclude with NON_EMPTY mapper = new ObjectMapper().registerModule(mod) .setSerializationInclusion(JsonInclude.Include.NON_EMPTY); assertEquals("{}", mapper.writeValueAsString(data)); // and with new (2.6) NON_ABSENT mapper = new ObjectMapper().registerModule(mod) .setSerializationInclusion(JsonInclude.Include.NON_ABSENT); assertEquals("{}", mapper.writeValueAsString(data)); } public void testSerOptNonEmpty() throws Exception { OptionalData data = new 
OptionalData(); data.myString = null; String value = mapperWithModule().setSerializationInclusion( JsonInclude.Include.NON_EMPTY).writeValueAsString(data); assertEquals("{}", value); } public void testWithTypingEnabled() throws Exception { final ObjectMapper objectMapper = mapperWithModule(); // ENABLE TYPING objectMapper .enableDefaultTyping(ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE); final OptionalData myData = new OptionalData(); myData.myString = Optional.ofNullable("abc"); final String json = objectMapper.writeValueAsString(myData); final OptionalData deserializedMyData = objectMapper.readValue(json, OptionalData.class); assertEquals(myData.myString, deserializedMyData.myString); } public void testObjectId() throws Exception { final Unit input = new Unit(); input.link(input); String json = MAPPER.writeValueAsString(input); Unit result = MAPPER.readValue(json, Unit.class); assertNotNull(result); assertNotNull(result.baseUnit); assertTrue(result.baseUnit.isPresent()); Unit base = result.baseUnit.get(); assertSame(result, base); } public void testOptionalCollection() throws Exception { TypeReference<List<Optional<String>>> typeReference = new TypeReference<List<Optional<String>>>() { }; List<Optional<String>> list = new ArrayList<Optional<String>>(); list.add(Optional.of("2014-1-22")); list.add(Optional.<String> empty()); list.add(Optional.of("2014-1-23")); String str = MAPPER.writeValueAsString(list); assertEquals("[\"2014-1-22\",null,\"2014-1-23\"]", str); List<Optional<String>> result = MAPPER.readValue(str, typeReference); assertEquals(list.size(), result.size()); for (int i = 0; i < list.size(); ++i) { assertEquals("Entry #" + i, list.get(i), result.get(i)); } } public void testPolymorphic() throws Exception { final Container dto = new Container(); dto.contained = Optional.of((Contained) new ContainedImpl()); final String json = MAPPER.writeValueAsString(dto); final Container fromJson = MAPPER.readValue(json, Container.class); 
assertNotNull(fromJson.contained); assertTrue(fromJson.contained.isPresent()); assertSame(ContainedImpl.class, fromJson.contained.get().getClass()); } }
package kr.pe.kwonnam.freemarkerdynamicqlbuilder; import freemarker.template.Configuration; import freemarker.template.TemplateDirectiveModel; import freemarker.template.TemplateModelException; import kr.pe.kwonnam.freemarkerdynamicqlbuilder.directives.SetDirective; import kr.pe.kwonnam.freemarkerdynamicqlbuilder.directives.TrimDirective; import kr.pe.kwonnam.freemarkerdynamicqlbuilder.directives.WhereDirective; import kr.pe.kwonnam.freemarkerdynamicqlbuilder.objectunwrapper.TemplateModelObjectUnwrapper; import kr.pe.kwonnam.freemarkerdynamicqlbuilder.paramconverter.ParameterConverter; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.runners.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.verify; @RunWith(MockitoJUnitRunner.class) public class FreemarkerDynamicQlBuilderFactoryTest { private Logger log = LoggerFactory.getLogger(FreemarkerDynamicQlBuilderFactoryTest.class); @Mock private Configuration freemarkerConfiguration; @Mock private ParameterConverter parameterConverter1; @Mock private ParameterConverter parameterConverter2; @Mock private TemplateModelObjectUnwrapper templateModelObjectUnwrapper; @Captor private ArgumentCaptor<Map<String, TemplateDirectiveModel>> templateDirectivesCaptor; private FreemarkerDynamicQlBuilderFactory factory; @Before public void setUp() throws Exception { factory = new 
FreemarkerDynamicQlBuilderFactory(freemarkerConfiguration); } @Test public void constructor_freemarkerConfiguration_null() throws Exception { try { new FreemarkerDynamicQlBuilderFactory(null); fail("Must throw an exception"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), containsString("freemarkerConfiguration must not be null.")); } } @Test public void qlDirectivePrefix_null() throws Exception { try { factory.qlDirectivePrefix(null); fail("Must throw an exception"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), containsString("qlDirectivePrefix must not be null or empty.")); } } @Test public void qlDirectivePrefix_empty() throws Exception { try { factory.qlDirectivePrefix(""); fail("Must throw an exception"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), containsString("qlDirectivePrefix must not be null or empty.")); } } @Test public void paramMethodName_null() throws Exception { try { factory.paramMethodName(null); fail("Must throw an exception"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), containsString("paramMethodName must not be null or empty.")); } } @Test public void paramMethodName_empty() throws Exception { try { factory.paramMethodName(""); fail("Must throw an exception"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), containsString("paramMethodName must not be null or empty.")); } } @Test public void queryTemplateNamePostfix_null() throws Exception { try { factory.queryTemplateNamePostfix(null); fail("Must throw an exception - IllegalArgumentException"); } catch (IllegalArgumentException ex) { assertThat("Must throw an exception", ex.getMessage(), is("queryTemplateNamePostfix must not be null.")); } } @Test public void templateModelObjectUnwrapper_null() throws Exception { try { 
// NOTE(review): this excerpt begins inside the try block of a test method whose
// opening lies before this chunk (a null-argument test for
// templateModelObjectUnwrapper); the statements below complete that method.
factory.templateModelObjectUnwrapper(null);
            fail("Must throw an exception - IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            assertThat("Must throw an exception", ex.getMessage(),
                    is("templateModelObjectUnwrapper must not be null."));
        }
    }

    // A null converter name must be rejected with a descriptive message.
    @Test
    public void addParameterConverter_key_null() throws Exception {
        try {
            factory.addParameterConverter(null, parameterConverter1);
            fail("Must throw an exception - IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            assertThat("Must throw an exception", ex.getMessage(),
                    is("parameterConverterName must not be null or empty."));
        }
    }

    // An empty converter name must be rejected the same way as null.
    @Test
    public void addParameterConverter_key_empty() throws Exception {
        try {
            factory.addParameterConverter("", parameterConverter1);
            fail("Must throw an exception - IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            assertThat("Must throw an exception", ex.getMessage(),
                    is("parameterConverterName must not be null or empty."));
        }
    }

    // A null converter instance must be rejected.
    @Test
    public void addParameterConverter_value_null() throws Exception {
        try {
            factory.addParameterConverter("pc1", null);
            fail("Must throw an exception - IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            assertThat("Must throw an exception", ex.getMessage(),
                    is("parameterConverter must not be null."));
        }
    }

    // Registering a second converter under an already-used name must fail.
    @Test
    public void addParameterConverter_duplicated_key() throws Exception {
        factory.addParameterConverter("parameterConverterName", parameterConverter1);
        try {
            factory.addParameterConverter("parameterConverterName", parameterConverter2);
            fail("Must throw an exception - IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            assertThat("Must throw an exception", ex.getMessage(),
                    is("parameterConverterName - parameterConverterName is duplicated."));
        }
    }

    // Passing null to addAllParameterConverters is a no-op, not an error.
    @Test
    public void addAllParameterConverters_null() throws Exception {
        assertThat(factory.getParameterConverters().size(), is(0));
        factory.addAllParameterConverters(null);
        assertThat(factory.getParameterConverters().size(), is(0));
    }

    // All entries of the supplied map are copied into the factory.
    @Test
    public void addAllParameterConverters() throws Exception {
        assertThat(factory.getParameterConverters().size(), is(0));

        Map<String, ParameterConverter> otherParameterConverters = new HashMap<String, ParameterConverter>();
        otherParameterConverters.put("pc1", parameterConverter1);
        otherParameterConverters.put("pc2", parameterConverter2);
        factory.addAllParameterConverters(otherParameterConverters);

        assertThat(factory.getParameterConverters().size(), is(2));
        assertThat(factory.getParameterConverters().keySet(), hasItems("pc1", "pc2"));
    }

    // clearParameterConverters removes every registered converter.
    @Test
    public void clearParameterConverters() throws Exception {
        factory.addParameterConverter("1", parameterConverter1);
        factory.addParameterConverter("2", parameterConverter2);
        factory.addParameterConverter("3", parameterConverter1);
        factory.addParameterConverter("4", parameterConverter2);
        assertThat(factory.getParameterConverters().size(), is(4));

        factory.clearParameterConverters();
        assertThat(factory.getParameterConverters().size(), is(0));
    }

    // End-to-end: all fluent setters flow through to the built implementation,
    // and building registers the shared template directives on the Freemarker
    // configuration under the configured directive prefix.
    @Test
    public void getFreemarkerDynamicQlBuilder() throws Exception {
        FreemarkerDynamicQlBuilderImpl freemarkerDynamicQlBuilder = (FreemarkerDynamicQlBuilderImpl) factory
                .qlDirectivePrefix("Q")
                .paramMethodName("P")
                .queryTemplateNamePostfix(".QL.FTL")
                .templateModelObjectUnwrapper(templateModelObjectUnwrapper)
                .addParameterConverter("pc1", parameterConverter1)
                .addParameterConverter("pc2", parameterConverter2)
                .getFreemarkerDynamicQlBuilder();

        assertThat(freemarkerDynamicQlBuilder.getFreemarkerConfiguration(),
                sameInstance(freemarkerConfiguration));
        assertThat(freemarkerDynamicQlBuilder.getQlDirectivePrefix(), is("Q"));
        assertThat(freemarkerDynamicQlBuilder.getParamMethodName(), is("P"));
        assertThat(freemarkerDynamicQlBuilder.getQueryTemplateNamePostfix(), is(".QL.FTL"));
        assertThat(freemarkerDynamicQlBuilder.getTemplateModelObjectUnwrapper(),
                sameInstance(templateModelObjectUnwrapper));
        assertThat(freemarkerDynamicQlBuilder.getParameterConverters().size(), is(2));
        assertThat(freemarkerDynamicQlBuilder.getParameterConverters().values(),
                hasItems(parameterConverter1, parameterConverter2));
        assertThat(freemarkerDynamicQlBuilder.getParameterConverters().keySet(),
                hasItems("pc1", "pc2"));

        verifyTemplateDirectiveRegistration();
    }

    // Verifies the set/trim/where directives were registered as one shared
    // variable map keyed by the directive prefix ("Q" in the test above).
    private void verifyTemplateDirectiveRegistration() throws TemplateModelException {
        verify(freemarkerConfiguration).setSharedVariable(eq("Q"), templateDirectivesCaptor.capture());
        Map<String, TemplateDirectiveModel> templateDirectives = templateDirectivesCaptor.getValue();
        assertThat(templateDirectives.keySet(), hasSize(3));
        assertThat(templateDirectives.keySet(),
                hasItems(SetDirective.DIRECTIVE_NAME, TrimDirective.DIRECTIVE_NAME,
                        WhereDirective.DIRECTIVE_NAME));
    }
}
/* * Copyright 2012 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfo.Visibility; import com.google.javascript.rhino.JSDocInfoBuilder; import com.google.javascript.rhino.JSTypeExpression; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Rewrites "goog.defineClass" into a form that is suitable for * type checking and dead code elimination. 
 *
 * @author johnlenz@google.com (John Lenz)
 */
class ClosureRewriteClass extends AbstractPostOrderCallback
    implements HotSwapCompilerPass {

  // Errors
  static final DiagnosticType GOOG_CLASS_TARGET_INVALID = DiagnosticType.error(
      "JSC_GOOG_CLASS_TARGET_INVALID",
      "Unsupported class definition expression.");

  static final DiagnosticType GOOG_CLASS_SUPER_CLASS_NOT_VALID = DiagnosticType.error(
      "JSC_GOOG_CLASS_SUPER_CLASS_NOT_VALID",
      "The super class must be null or a valid name reference");

  static final DiagnosticType GOOG_CLASS_DESCRIPTOR_NOT_VALID = DiagnosticType.error(
      "JSC_GOOG_CLASS_DESCRIPTOR_NOT_VALID",
      "The class must be defined by an object literal");

  static final DiagnosticType GOOG_CLASS_CONSTRUCTOR_MISSING = DiagnosticType.error(
      "JSC_GOOG_CLASS_CONSTRUCTOR_MISSING",
      "The 'constructor' property is missing for the class definition");

  static final DiagnosticType GOOG_CLASS_CONSTRUCTOR_NOT_VALID = DiagnosticType.error(
      "JSC_GOOG_CLASS_CONSTRUCTOR_NOT_VALID",
      "The 'constructor' expression must be a function literal");

  static final DiagnosticType GOOG_CLASS_CONSTRUCTOR_ON_INTERFACE = DiagnosticType.error(
      "JSC_GOOG_CLASS_CONSTRUCTOR_ON_INTERFACE",
      "An interface definition should not have a 'constructor' property");

  static final DiagnosticType GOOG_CLASS_STATICS_NOT_VALID = DiagnosticType.error(
      "JSC_GOOG_CLASS_STATICS_NOT_VALID",
      "The class 'statics' property must be an object or function literal");

  static final DiagnosticType GOOG_CLASS_UNEXPECTED_PARAMS = DiagnosticType.error(
      "JSC_GOOG_CLASS_UNEXPECTED_PARAMS",
      "Too many arguments to goog.defineClass.");

  static final DiagnosticType GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED =
      DiagnosticType.error(
          "JSC_GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED",
          "Computed property names not supported in goog.defineClass.");

  static final DiagnosticType GOOG_CLASS_ES6_SHORTHAND_ASSIGNMENT_NOT_SUPPORTED =
      DiagnosticType.error(
          "JSC_GOOG_CLASS_ES6_SHORTHAND_ASSIGNMENT_NOT_SUPPORTED",
          "Shorthand assignments not supported in goog.defineClass.");

  static final DiagnosticType GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED =
      DiagnosticType.error(
          "JSC_GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED",
          "Arrow functions not supported in goog.defineClass. Object literal method"
              + " definition may be an alternative.");

  // Warnings
  static final DiagnosticType GOOG_CLASS_NG_INJECT_ON_CLASS = DiagnosticType.warning(
      "JSC_GOOG_CLASS_NG_INJECT_ON_CLASS",
      "@ngInject should be declared on the constructor, not on the class.");

  private final AbstractCompiler compiler;

  public ClosureRewriteClass(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    // Full-compile entry point: treat the whole root as one hot-swapped script.
    hotSwapScript(root, null);
  }

  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    NodeTraversal.traverseEs6(compiler, scriptRoot, this);
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    // Report misplaced goog.defineClass calls, then attempt the rewrite on the
    // enclosing statement (maybeRewriteClassDefinition is a no-op for other nodes).
    if (n.isCall() && isGoogDefineClass(n) && !validateUsage(n)) {
      compiler.report(JSError.make(n, GOOG_CLASS_TARGET_INVALID));
    }
    maybeRewriteClassDefinition(t, n);
  }

  /**
   * Checks that a goog.defineClass call appears in one of the supported
   * syntactic positions (see the comment below).
   */
  private boolean validateUsage(Node n) {
    // There are only three valid usage patterns for of goog.defineClass
    //   var ClassName = googDefineClass
    //   namespace.ClassName = googDefineClass
    //   and within an objectlit, used by the goog.defineClass.
    Node parent = n.getParent();
    switch (parent.getToken()) {
      case NAME:
        return true;
      case ASSIGN:
        return n == parent.getLastChild() && parent.getParent().isExprResult();
      case STRING_KEY:
        return isContainedInGoogDefineClass(parent);
      default:
        break;
    }
    return false;
  }

  /**
   * Walks up through object literals / string keys looking for an enclosing
   * goog.defineClass call (the nested-class case).
   */
  private boolean isContainedInGoogDefineClass(Node n) {
    while (n != null) {
      n = n.getParent();
      if (n.isCall()) {
        if (isGoogDefineClass(n)) {
          return true;
        }
      } else if (!n.isObjectLit() && !n.isStringKey()) {
        break;
      }
    }
    return false;
  }

  /**
   * Dispatches to the four-argument overload for the two supported statement
   * shapes: a name declaration or an expression-statement assignment.
   */
  private void maybeRewriteClassDefinition(NodeTraversal t, Node n) {
    if (NodeUtil.isNameDeclaration(n)) {
      Node target = n.getFirstChild();
      Node value = target.getFirstChild();
      maybeRewriteClassDefinition(t, n, target, value);
    } else if (NodeUtil.isExprAssign(n)) {
      Node assign = n.getFirstChild();
      Node target = assign.getFirstChild();
      Node value = assign.getLastChild();
      maybeRewriteClassDefinition(t, n, target, value);
    }
  }

  /**
   * If {@code value} is a goog.defineClass call assigned to a qualified name,
   * extracts the definition (destructively) and rewrites it in place.
   */
  private void maybeRewriteClassDefinition(
      NodeTraversal t, Node n, Node target, Node value) {
    if (isGoogDefineClass(value)) {
      if (!target.isQualifiedName()) {
        compiler.report(JSError.make(n, GOOG_CLASS_TARGET_INVALID));
      }
      ClassDefinition def = extractClassDefinition(target, value);
      if (def != null) {
        value.detach();
        target.detach();
        rewriteGoogDefineClass(t, n, def);
      }
    }
  }

  /** A single extracted class member: its JSDoc, key node, and value node. */
  private static class MemberDefinition {
    final JSDocInfo info;
    final Node name;
    final Node value;

    MemberDefinition(JSDocInfo info, Node name, Node value) {
      this.info = info;
      this.name = name;
      this.value = value;
    }
  }

  /** Immutable holder for the pieces extracted from a goog.defineClass call. */
  private static final class ClassDefinition {
    final Node name;
    final JSDocInfo classInfo;
    final Node superClass;
    final MemberDefinition constructor;
    final List<MemberDefinition> staticProps;
    final List<MemberDefinition> props;
    final Node classModifier;

    ClassDefinition(
        Node name,
        JSDocInfo classInfo,
        Node superClass,
        MemberDefinition constructor,
        List<MemberDefinition> staticProps,
        List<MemberDefinition> props,
        Node classModifier) {
      this.name = name;
      this.classInfo = classInfo;
      this.superClass = superClass;
      this.constructor = constructor;
      this.staticProps = staticProps;
      this.props = props;
      this.classModifier = classModifier;
    }
  }

  /**
   * Validates the class definition and if valid, destructively extracts
   * the class definition from the AST.
   */
  private ClassDefinition extractClassDefinition(
      Node targetName, Node callNode) {

    JSDocInfo classInfo = NodeUtil.getBestJSDocInfo(targetName);

    // name = goog.defineClass(superClass, {...}, [modifier, ...])
    Node superClass = NodeUtil.getArgumentForCallOrNew(callNode, 0);
    if (superClass == null
        || (!superClass.isNull() && !superClass.isQualifiedName())) {
      compiler.report(JSError.make(callNode, GOOG_CLASS_SUPER_CLASS_NOT_VALID));
      return null;
    }

    // Treat explicit null/undefined or Object as "no superclass".
    if (NodeUtil.isNullOrUndefined(superClass)
        || superClass.matchesQualifiedName("Object")) {
      superClass = null;
    }

    Node description = NodeUtil.getArgumentForCallOrNew(callNode, 1);
    if (!validateObjLit(description, callNode)) {
      // Errors will be reported in the validate method. Keeping here clean
      return null;
    }

    int paramCount = callNode.getChildCount() - 1;
    if (paramCount > 2) {
      compiler.report(JSError.make(callNode, GOOG_CLASS_UNEXPECTED_PARAMS));
      return null;
    }

    Node constructor = extractProperty(description, "constructor");
    if (classInfo != null && classInfo.isInterface()) {
      if (constructor != null) {
        compiler.report(JSError.make(description, GOOG_CLASS_CONSTRUCTOR_ON_INTERFACE));
        return null;
      }
    } else if (constructor == null) {
      // report missing constructor
      compiler.report(JSError.make(description, GOOG_CLASS_CONSTRUCTOR_MISSING));
      return null;
    } else {
      if (!constructor.isFunction()) {
        compiler.report(JSError.make(constructor, GOOG_CLASS_CONSTRUCTOR_NOT_VALID));
      }
    }
    if (constructor == null) {
      // Interfaces get a synthesized empty constructor function.
      constructor = IR.function(
          IR.name("").srcref(callNode),
          IR.paramList().srcref(callNode),
          IR.block().srcref(callNode));
      constructor.srcref(callNode);
      compiler.reportChangeToChangeScope(constructor);
    }

    JSDocInfo info = NodeUtil.getBestJSDocInfo(constructor);

    // 'statics' may be an object literal of static members or a class-modifier
    // function that receives the constructor.
    Node classModifier = null;
    Node statics = null;
    Node staticsProp = extractProperty(description, "statics");
    if (staticsProp != null) {
      if (staticsProp.isObjectLit()) {
        if (!validateObjLit(staticsProp, staticsProp.getParent())) {
          // Errors will be reported in the validate method. Keeping here clean
          return null;
        }
        statics = staticsProp;
      } else if (staticsProp.isFunction()) {
        classModifier = staticsProp;
      } else {
        compiler.report(
            JSError.make(staticsProp, GOOG_CLASS_STATICS_NOT_VALID));
        return null;
      }
    }

    if (statics == null) {
      statics = IR.objectlit();
    }

    // Ok, now rip apart the definition into its component pieces.
    // Remove the "special" property key nodes.
    maybeDetach(constructor.getParent());
    maybeDetach(statics.getParent());
    if (classModifier != null) {
      maybeDetach(classModifier.getParent());
    }
    ClassDefinition def = new ClassDefinition(
        targetName,
        classInfo,
        maybeDetach(superClass),
        new MemberDefinition(info, null, maybeDetach(constructor)),
        objectLitToList(maybeDetach(statics)),
        objectLitToList(description),
        maybeDetach(classModifier));
    return def;
  }

  /** Detaches {@code node} from its parent, if any; returns it for chaining. */
  private static Node maybeDetach(Node node) {
    if (node != null && node.getParent() != null) {
      node.detach();
    }
    return node;
  }

  /**
   * @param objlit the object literal being checked.
   * @param parent the parent of the object literal node
   * @return false if the node is not an object literal, or if it contains any
   *     property that is neither unquoted plain property nor member
   *     function definition (ES6 feature)
   */
  private boolean validateObjLit(Node objlit, Node parent) {
    if (objlit == null || !objlit.isObjectLit()) {
      reportErrorOnContext(parent);
      return false;
    }

    for (Node key : objlit.children()) {
      if (key.isMemberFunctionDef()) {
        continue;
      }
      if (key.isComputedProp()) {
        // report using computed property name
        compiler.report(JSError.make(objlit, GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED));
        return false;
      }
      if (key.isStringKey() && !key.hasChildren()) {
        // report using shorthand assignment
        compiler.report(JSError.make(objlit, GOOG_CLASS_ES6_SHORTHAND_ASSIGNMENT_NOT_SUPPORTED));
        return false;
      }
      if (key.isStringKey() && key.hasChildren() && key.getFirstChild().isArrowFunction()) {
        // report using arrow function
        compiler.report(JSError.make(objlit, GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED));
        return false;
      }
      if (!key.isStringKey() || key.isQuotedString()) {
        reportErrorOnContext(parent);
        return false;
      }
    }
    return true;
  }

  /**
   * Reports the appropriate "invalid object literal" diagnostic: statics error
   * when nested under a string key, otherwise a bad class descriptor.
   */
  private void reportErrorOnContext(Node parent) {
    if (parent.isStringKey()) {
      compiler.report(JSError.make(parent, GOOG_CLASS_STATICS_NOT_VALID));
    } else {
      // Report error in the context that the objlit is an
      // argument of goog.defineClass call.
      checkState(parent.isCall());
      compiler.report(JSError.make(parent, GOOG_CLASS_DESCRIPTOR_NOT_VALID));
    }
  }

  /**
   * @return The first property in the objlit that matches the key.
   */
  private static Node extractProperty(Node objlit, String keyName) {
    for (Node keyNode : objlit.children()) {
      if (keyNode.getString().equals(keyName)) {
        return keyNode.getFirstChild();
      }
    }
    return null;
  }

  /**
   * Destructively converts an object literal's keys into MemberDefinitions,
   * detaching every child from the literal.
   */
  private static List<MemberDefinition> objectLitToList(
      Node objlit) {
    List<MemberDefinition> result = new ArrayList<>();
    for (Node keyNode : objlit.children()) {
      Node name = keyNode;

      // The span of a member function def is the whole function. The NAME node should be the
      // first-first child, which will have a span for just the name of the function.
      if (keyNode.isMemberFunctionDef()) {
        name = keyNode.getFirstFirstChild().cloneNode();
        name.setString(keyNode.getString());
      }
      result.add(
          new MemberDefinition(
              NodeUtil.getBestJSDocInfo(keyNode),
              name,
              keyNode.removeFirstChild()));
    }
    objlit.detachChildren();
    return result;
  }

  /**
   * Replaces the original goog.defineClass statement with the expanded form:
   * constructor assignment, goog.inherits (if any), static and prototype
   * property assignments, and finally the class-modifier call.
   */
  private void rewriteGoogDefineClass(NodeTraversal t, Node exprRoot, final ClassDefinition cls) {
    // For simplicity add everything into a block, before adding it to the AST.
    Node block = IR.block();

    // remove the original jsdoc info if it was attached to the value.
    cls.constructor.value.setJSDocInfo(null);
    if (NodeUtil.isNameDeclaration(exprRoot)) {
      // example: var ctr = function(){}
      Node decl =
          IR.declaration(cls.name.cloneTree(), cls.constructor.value, exprRoot.getToken())
              .srcref(exprRoot);
      JSDocInfo mergedClassInfo = mergeJsDocFor(cls, decl);
      decl.setJSDocInfo(mergedClassInfo);
      block.addChildToBack(decl);
    } else {
      // example: ns.ctr = function(){}
      Node assign = IR.assign(cls.name.cloneTree(), cls.constructor.value)
          .srcref(exprRoot)
          .setJSDocInfo(cls.constructor.info);
      JSDocInfo mergedClassInfo = mergeJsDocFor(cls, assign);
      assign.setJSDocInfo(mergedClassInfo);
      Node expr = IR.exprResult(assign).srcref(exprRoot);
      block.addChildToBack(expr);
    }

    if (cls.superClass != null) {
      // example: goog.inherits(ctr, superClass)
      block.addChildToBack(
          fixupSrcref(IR.exprResult(
              IR.call(
                  NodeUtil.newQName(compiler, "goog.inherits")
                      .srcrefTree(cls.superClass),
                  cls.name.cloneTree(),
                  cls.superClass.cloneTree()).srcref(cls.superClass))));
    }

    for (MemberDefinition def : cls.staticProps) {
      // remove the original jsdoc info if it was attached to the value.
      def.value.setJSDocInfo(null);

      // example: ctr.prop = value
      block.addChildToBack(
          fixupSrcref(IR.exprResult(
              fixupSrcref(IR.assign(
                  IR.getprop(cls.name.cloneTree(),
                      IR.string(def.name.getString()).srcref(def.name))
                      .srcref(def.name),
                  def.value)).setJSDocInfo(def.info))));
      // Handle inner class definitions.
      maybeRewriteClassDefinition(t, block.getLastChild());
    }

    for (MemberDefinition def : cls.props) {
      // remove the original jsdoc info if it was attached to the value.
      def.value.setJSDocInfo(null);

      // example: ctr.prototype.prop = value
      Node exprResult = IR.exprResult(
          IR.assign(
              NodeUtil.newQName(
                  compiler,
                  cls.name.getQualifiedName() + ".prototype." + def.name.getString()),
              def.value)
              .setJSDocInfo(def.info));
      exprResult.useSourceInfoIfMissingFromForTree(def.name);
      // The length needs to be set explicitly to include the string key node and the function node.
      // If we just used the length of def.name or def.value alone, then refactorings which try to
      // delete the method would not work correctly.
      exprResult.setLength(
          def.value.getSourceOffset() + def.value.getLength() - def.name.getSourceOffset());
      block.addChildToBack(exprResult);
      // Handle inner class definitions.
      maybeRewriteClassDefinition(t, block.getLastChild());
    }

    if (cls.classModifier != null) {
      // Inside the modifier function, replace references to the argument
      // with the class name.
      //   function(cls) { cls.Foo = bar; }
      // becomes
      //   function(cls) { theClassName.Foo = bar; }
      // The cls parameter is unused, but leave it there so that it
      // matches the JsDoc.
      // TODO(tbreisacher): Add a warning if the param is shadowed or reassigned.
      Node argList = cls.classModifier.getSecondChild();
      Node arg = argList.getFirstChild();
      final String argName = arg.getString();
      NodeTraversal.traverseEs6(
          compiler,
          cls.classModifier.getLastChild(),
          new AbstractPostOrderCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              if (n.isName() && n.getString().equals(argName)) {
                Node newName = cls.name.cloneTree();
                parent.replaceChild(n, newName);
                compiler.reportChangeToEnclosingScope(newName);
              }
            }
          });
      block.addChildToBack(
          IR.exprResult(
              fixupFreeCall(
                  IR.call(
                      cls.classModifier,
                      cls.name.cloneTree())
                      .srcref(cls.classModifier)))
              .srcref(cls.classModifier));
    }

    // Splice the generated statements in place of the original statement.
    Node parent = exprRoot.getParent();
    Node stmts = block.removeChildren();
    parent.addChildrenAfter(stmts, exprRoot);
    parent.removeChild(exprRoot);

    // compiler.reportChangeToEnclosingScope(parent);
    t.reportCodeChange();
  }

  /** Copies source position info from the node's first child onto the node. */
  private static Node fixupSrcref(Node node) {
    node.srcref(node.getFirstChild());
    return node;
  }

  /** Marks a synthesized CALL node as a free call (no 'this' binding). */
  private static Node fixupFreeCall(Node call) {
    checkState(call.isCall());
    call.putBooleanProp(Node.FREE_CALL, true);
    return call;
  }

  /**
   * @return Whether the call represents a class definition.
   */
  static boolean isGoogDefineClass(Node value) {
    if (value != null && value.isCall()) {
      return value.getFirstChild().matchesQualifiedName("goog.defineClass");
    }
    return false;
  }

  static final String VIRTUAL_FILE = "<ClosureRewriteClass.java>";

  /**
   * Merges the JSDoc attached to the class expression with the JSDoc attached
   * to the constructor, synthesizing @extends / @interface / @struct etc. so
   * the result type-checks like a hand-written constructor.
   */
  private JSDocInfo mergeJsDocFor(ClassDefinition cls, Node associatedNode) {
    // avoid null checks
    JSDocInfo classInfo = (cls.classInfo != null)
        ? cls.classInfo
        : new JSDocInfoBuilder(true).build(true);

    JSDocInfo ctorInfo = (cls.constructor.info != null)
        ? cls.constructor.info
        : new JSDocInfoBuilder(true).build(true);

    Node superNode = cls.superClass;

    // Start with a clone of the constructor info if there is one.
    JSDocInfoBuilder mergedInfo = cls.constructor.info != null
        ? JSDocInfoBuilder.copyFrom(ctorInfo)
        : new JSDocInfoBuilder(true);

    // merge block description
    String blockDescription = Joiner.on("\n").skipNulls().join(
        classInfo.getBlockDescription(),
        ctorInfo.getBlockDescription());
    if (!blockDescription.isEmpty()) {
      mergedInfo.recordBlockDescription(blockDescription);
    }

    // merge suppressions
    Set<String> suppressions = new HashSet<>();
    suppressions.addAll(classInfo.getSuppressions());
    suppressions.addAll(ctorInfo.getSuppressions());
    if (!suppressions.isEmpty()) {
      mergedInfo.recordSuppressions(suppressions);
    }

    // Use class deprecation if set.
    if (classInfo.isDeprecated()) {
      mergedInfo.recordDeprecated();
    }

    String deprecationReason = null;
    if (classInfo.getDeprecationReason() != null) {
      deprecationReason = classInfo.getDeprecationReason();
      mergedInfo.recordDeprecationReason(deprecationReason);
    }

    // Use class visibility if specifically set
    Visibility visibility = classInfo.getVisibility();
    if (visibility != null && visibility != JSDocInfo.Visibility.INHERITED) {
      mergedInfo.recordVisibility(classInfo.getVisibility());
    }

    if (classInfo.isAbstract()) {
      mergedInfo.recordAbstract();
    }

    if (classInfo.isConstant()) {
      mergedInfo.recordConstancy();
    }

    if (classInfo.isExport()) {
      mergedInfo.recordExport();
    }

    // If @ngInject is on the ctor, it's already been copied above.
    if (classInfo.isNgInject()) {
      compiler.report(JSError.make(associatedNode, GOOG_CLASS_NG_INJECT_ON_CLASS));
      mergedInfo.recordNgInject(true);
    }

    if (classInfo.makesUnrestricted() || ctorInfo.makesUnrestricted()) {
      mergedInfo.recordUnrestricted();
    } else if (classInfo.makesDicts() || ctorInfo.makesDicts()) {
      mergedInfo.recordDict();
    } else {
      // @struct by default
      mergedInfo.recordStruct();
    }

    // @constructor is implied, @interface must be explicit
    boolean isInterface = classInfo.isInterface() || ctorInfo.isInterface();
    if (isInterface) {
      if (classInfo.usesImplicitMatch() || ctorInfo.usesImplicitMatch()) {
        mergedInfo.recordImplicitMatch();
      } else {
        mergedInfo.recordInterface();
      }
      List<JSTypeExpression> extendedInterfaces = null;
      if (classInfo.getExtendedInterfacesCount() > 0) {
        extendedInterfaces = classInfo.getExtendedInterfaces();
      } else if (ctorInfo.getExtendedInterfacesCount() == 0
          && superNode != null) {
        extendedInterfaces = ImmutableList.of(new JSTypeExpression(
            new Node(Token.BANG, IR.string(superNode.getQualifiedName())),
            VIRTUAL_FILE));
      }
      if (extendedInterfaces != null) {
        for (JSTypeExpression extend : extendedInterfaces) {
          mergedInfo.recordExtendedInterface(extend);
        }
      }
    } else {
      // @constructor by default
      mergedInfo.recordConstructor();

      if (classInfo.getBaseType() != null) {
        mergedInfo.recordBaseType(classInfo.getBaseType());
      } else if (superNode != null) {
        // a "super" implies @extends, build a default.
        JSTypeExpression baseType = new JSTypeExpression(
            new Node(Token.BANG, IR.string(superNode.getQualifiedName())),
            VIRTUAL_FILE);
        mergedInfo.recordBaseType(baseType);
      }

      // @implements from the class if they exist
      List<JSTypeExpression> interfaces = classInfo.getImplementedInterfaces();
      for (JSTypeExpression implemented : interfaces) {
        mergedInfo.recordImplementedInterface(implemented);
      }
    }

    // merge @template types if they exist
    List<String> templateNames = new ArrayList<>();
    templateNames.addAll(classInfo.getTemplateTypeNames());
    templateNames.addAll(ctorInfo.getTemplateTypeNames());
    for (String typeName : templateNames) {
      mergedInfo.recordTemplateTypeName(typeName);
    }
    return mergedInfo.build();
  }
}
/**
 *
 * Copyright 2003-2005 Jive Software.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.smackx.jingleold.packet;

import org.jivesoftware.smack.packet.ExtensionElement;
import org.jivesoftware.smackx.jingleold.nat.ICECandidate;
import org.jivesoftware.smackx.jingleold.nat.TransportCandidate;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * A jingle transport extension.
 *
 * @author Alvaro Saurin <alvaro.saurin@gmail.com>
 */
public class JingleTransport implements ExtensionElement {

    // static

    public static final String NODENAME = "transport";

    // non-static

    // XML namespace; set by the concrete subclass (Ice or RawUdp) at construction.
    protected String namespace;

    // Candidate list; all access is synchronized on this list itself.
    protected final List<JingleTransportCandidate> candidates = new ArrayList<JingleTransportCandidate>();

    /**
     * Default constructor.
     */
    public JingleTransport() {
        super();
    }

    /**
     * Utility constructor, with a transport candidate element.
     *
     * @param candidate A transport candidate element to add.
     */
    public JingleTransport(final JingleTransportCandidate candidate) {
        super();
        addCandidate(candidate);
    }

    /**
     * Copy constructor.
     *
     * @param tr the other jingle transport.
     */
    public JingleTransport(final JingleTransport tr) {
        // NOTE(review): reads tr.candidates without synchronizing on it, unlike
        // the other accessors — confirm callers never copy a shared instance
        // concurrently with addCandidate.
        if (tr != null) {
            namespace = tr.namespace;

            if (tr.candidates.size() > 0) {
                candidates.addAll(tr.candidates);
            }
        }
    }

    /**
     * Adds a transport candidate.
     *
     * @param candidate the candidate
     */
    public void addCandidate(final JingleTransportCandidate candidate) {
        if (candidate != null) {
            synchronized (candidates) {
                candidates.add(candidate);
            }
        }
    }

    /**
     * Get an iterator for the candidates.
     *
     * @return an iterator
     */
    public Iterator<JingleTransportCandidate> getCandidates() {
        // Iterates over a defensive snapshot, so it is safe against concurrent adds.
        return Collections.unmodifiableList(getCandidatesList()).iterator();
    }

    /**
     * Get the list of candidates.
     *
     * @return The candidates list.
     */
    public List<JingleTransportCandidate> getCandidatesList() {
        // Returns a copy taken under the lock; callers may mutate it freely.
        ArrayList<JingleTransportCandidate> res = null;
        synchronized (candidates) {
            res = new ArrayList<JingleTransportCandidate>(candidates);
        }
        return res;
    }

    /**
     * Get the number of transport candidates.
     *
     * @return The number of transport candidates contained.
     */
    public int getCandidatesCount() {
        return getCandidatesList().size();
    }

    /**
     * Returns the XML element name of the element.
     *
     * @return the XML element name of the element.
     */
    @Override
    public String getElementName() {
        return NODENAME;
    }

    /**
     * Set the namespace.
     *
     * @param ns The namespace
     */
    protected void setNamespace(final String ns) {
        namespace = ns;
    }

    /**
     * Get the namespace.
     *
     * @return The namespace
     */
    @Override
    public String getNamespace() {
        return namespace;
    }

    /**
     * Return the XML representation for this element.
     */
    @Override
    public String toXML() {
        StringBuilder buf = new StringBuilder();

        buf.append('<').append(getElementName()).append(" xmlns=\"");
        buf.append(getNamespace()).append("\" ");

        // Holds the lock for the whole serialization so the emitted candidate
        // set is consistent (the nested synchronized calls are reentrant).
        synchronized (candidates) {
            if (getCandidatesCount() > 0) {
                buf.append('>');
                Iterator<JingleTransportCandidate> iter = getCandidates();

                while (iter.hasNext()) {
                    JingleTransportCandidate candidate = iter.next();
                    buf.append(candidate.toXML());
                }
                buf.append("</").append(getElementName()).append('>');
            } else {
                buf.append("/>");
            }
        }
        return buf.toString();
    }

    /**
     * Candidate element in the transport. This class acts as a view of the
     * "TransportCandidate" in the Jingle space.
     *
     * @author Alvaro Saurin
     * @see TransportCandidate
     */
    public static abstract class JingleTransportCandidate {

        public static final String NODENAME = "candidate";

        // The transport candidate contained in the element.
        protected TransportCandidate transportCandidate;

        /**
         * Creates a new TransportNegotiator child.
         */
        public JingleTransportCandidate() {
            super();
        }

        /**
         * Creates a new TransportNegotiator child.
         *
         * @param candidate the jmf transport candidate
         */
        public JingleTransportCandidate(final TransportCandidate candidate) {
            super();
            setMediaTransport(candidate);
        }

        /**
         * Returns the XML element name of the element.
         *
         * @return the XML element name of the element.
         */
        public static String getElementName() {
            return NODENAME;
        }

        /**
         * Get the current transportElement candidate.
         *
         * @return the transportElement candidate
         */
        public TransportCandidate getMediaTransport() {
            return transportCandidate;
        }

        /**
         * Set the transportElement candidate.
         *
         * @param cand the transportElement candidate
         */
        public void setMediaTransport(final TransportCandidate cand) {
            // Silently ignores null; the existing candidate is kept.
            if (cand != null) {
                transportCandidate = cand;
            }
        }

        /**
         * Get the list of attributes.
         *
         * @return a string with the list of attributes.
         */
        protected String getChildElements() {
            return null;
        }

        /**
         * Obtain a valid XML representation of a trancport candidate.
         *
         * @return A string containing the XML dump of the transport candidate.
         */
        public String toXML() {
            // Emits an empty string when there is no candidate or no attributes.
            StringBuilder buf = new StringBuilder();
            String childElements = getChildElements();

            if (transportCandidate != null && childElements != null) {
                buf.append('<').append(getElementName()).append(' ');
                buf.append(childElements);
                buf.append("/>");
            }

            return buf.toString();
        }
    }

    // Subclasses

    /**
     * RTP-ICE profile.
     */
    public static class Ice extends JingleTransport {
        public static final String NAMESPACE = "urn:xmpp:tmp:jingle:transports:ice-udp";

        public Ice() {
            super();
            setNamespace(NAMESPACE);
        }

        /**
         * Add a transport candidate.
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#addCandidate(org.jivesoftware.smackx.jingleold.packet.JingleTransport.JingleTransportCandidate)
         */
        @Override
        public void addCandidate(final JingleTransportCandidate candidate) {
            super.addCandidate(candidate);
        }

        /**
         * Get the list of candidates. Returns a copy of the full candidate
         * list (an ICE transport may carry several candidates).
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#getCandidates()
         */
        @Override
        public List<JingleTransportCandidate> getCandidatesList() {
            List<JingleTransportCandidate> copy = new ArrayList<JingleTransportCandidate>();
            List<JingleTransportCandidate> superCandidatesList = super.getCandidatesList();
            for (int i = 0; i < superCandidatesList.size(); i++) {
                copy.add(superCandidatesList.get(i));
            }

            return copy;
        }

        public static class Candidate extends JingleTransportCandidate {
            /**
             * Default constructor.
             */
            public Candidate() {
                super();
            }

            /**
             * Constructor with a transport candidate.
             */
            public Candidate(final TransportCandidate tc) {
                super(tc);
            }

            /**
             * Get the elements of this candidate.
             */
            @Override
            protected String getChildElements() {
                StringBuilder buf = new StringBuilder();

                // NOTE(review): the instanceof guard below was commented out, so
                // the cast throws ClassCastException if transportCandidate is not
                // an ICECandidate — confirm only ICECandidates reach this class.
                if (transportCandidate != null) {// && transportCandidate instanceof ICECandidate) {
                    ICECandidate tci = (ICECandidate) transportCandidate;

                    // We convert the transportElement candidate to XML here...
                    buf.append(" generation=\"").append(tci.getGeneration()).append('"');
                    buf.append(" ip=\"").append(tci.getIp()).append('"');
                    buf.append(" port=\"").append(tci.getPort()).append('"');
                    buf.append(" network=\"").append(tci.getNetwork()).append('"');
                    buf.append(" username=\"").append(tci.getUsername()).append('"');
                    buf.append(" password=\"").append(tci.getPassword()).append('"');
                    buf.append(" preference=\"").append(tci.getPreference()).append('"');
                    buf.append(" type=\"").append(tci.getType()).append('"');

                    // Optional elements
                    if (transportCandidate.getName() != null) {
                        buf.append(" name=\"").append(tci.getName()).append('"');
                    }
                }

                return buf.toString();
            }
        }
    }

    /**
     * Raw UDP profile.
     */
    public static class RawUdp extends JingleTransport {
        public static final String NAMESPACE = "http://www.xmpp.org/extensions/xep-0177.html#ns";

        public RawUdp() {
            super();
            setNamespace(NAMESPACE);
        }

        /**
         * Add a transport candidate. A raw-udp transport holds at most one
         * candidate, so any previous candidates are discarded first.
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#addCandidate(org.jivesoftware.smackx.jingleold.packet.JingleTransport.JingleTransportCandidate)
         */
        @Override
        public void addCandidate(final JingleTransportCandidate candidate) {
            candidates.clear();
            super.addCandidate(candidate);
        }

        /**
         * Get the list of candidates. As a "raw-udp" transport can only contain
         * one candidate, we use the first in the list...
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#getCandidates()
         */
        @Override
        public List<JingleTransportCandidate> getCandidatesList() {
            List<JingleTransportCandidate> copy = new ArrayList<JingleTransportCandidate>();
            List<JingleTransportCandidate> superCandidatesList = super.getCandidatesList();
            if (superCandidatesList.size() > 0) {
                copy.add(superCandidatesList.get(0));
            }

            return copy;
        }

        /**
         * Raw-udp transport candidate.
         */
        public static class Candidate extends JingleTransportCandidate {
            /**
             * Default constructor.
             */
            public Candidate() {
                super();
            }

            /**
             * Constructor with a transport candidate.
             */
            public Candidate(final TransportCandidate tc) {
                super(tc);
            }

            /**
             * Get the elements of this candidate.
             */
            @Override
            protected String getChildElements() {
                StringBuilder buf = new StringBuilder();

                // Here the instanceof check IS performed before the downcast.
                if (transportCandidate != null && transportCandidate instanceof TransportCandidate.Fixed) {
                    TransportCandidate.Fixed tcf = (TransportCandidate.Fixed) transportCandidate;

                    buf.append(" generation=\"").append(tcf.getGeneration()).append('"');
                    buf.append(" ip=\"").append(tcf.getIp()).append('"');
                    buf.append(" port=\"").append(tcf.getPort()).append('"');

                    // Optional parameters
                    String name = tcf.getName();
                    if (name != null) {
                        buf.append(" name=\"").append(name).append('"');
                    }
                }
                return buf.toString();
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.search;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Builds Lucene queries for the {@code multi_match} query: one match query per
 * field, combined per the requested {@link MultiMatchQueryBuilder.Type}
 * (dis-max or boolean for the field-centric types, term blending for
 * {@code cross_fields}).
 */
public class MultiMatchQuery extends MatchQuery {

    /** Explicit tie breaker; when {@code null} the type's default tie breaker is used. */
    private Float groupTieBreaker = null;

    public void setTieBreaker(float tieBreaker) {
        this.groupTieBreaker = tieBreaker;
    }

    public MultiMatchQuery(QueryParseContext parseContext) {
        super(parseContext);
    }

    /**
     * Parses a single-field match query, then applies {@code minimum_should_match}
     * (only meaningful for boolean queries) and an optional boost.
     *
     * @param boostValue per-field boost, or {@code null} for no explicit boost
     * @return the parsed query, or {@code null} if the field produced none
     */
    private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException {
        Query query = parse(type, fieldName, value);
        if (query instanceof BooleanQuery) {
            Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
        }
        if (boostValue != null && query != null) {
            query.setBoost(boostValue);
        }
        return query;
    }

    /**
     * Entry point: parses the value against every field in {@code fieldNames}
     * (field name to optional boost) and combines the per-field queries
     * according to {@code type}.
     */
    public Query parse(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
        // Single field: no grouping machinery needed, delegate directly.
        if (fieldNames.size() == 1) {
            Map.Entry<String, Float> fieldBoost = fieldNames.entrySet().iterator().next();
            Float boostValue = fieldBoost.getValue();
            return parseAndApply(type.matchQueryType(), fieldBoost.getKey(), value, minimumShouldMatch, boostValue);
        }
        final float tieBreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
        switch (type) {
            case PHRASE:
            case PHRASE_PREFIX:
            case BEST_FIELDS:
            case MOST_FIELDS:
                queryBuilder = new QueryBuilder(tieBreaker);
                break;
            case CROSS_FIELDS:
                queryBuilder = new CrossFieldsQueryBuilder(tieBreaker);
                break;
            default:
                throw new ElasticsearchIllegalStateException("No such type: " + type);
        }
        final List<? extends Query> queries = queryBuilder.buildGroupedQueries(type, fieldNames, value, minimumShouldMatch);
        return queryBuilder.conbineGrouped(queries);
    }

    // Active strategy for the current parse() call; also consulted by blendTermQuery().
    private QueryBuilder queryBuilder;

    /**
     * Default (field-centric) strategy: one query per field, combined with
     * dis-max when the tie breaker is not 1.0, otherwise with a boolean SHOULD.
     */
    public class QueryBuilder {
        protected final boolean groupDismax;
        protected final float tieBreaker;

        public QueryBuilder(float tieBreaker) {
            // A tie breaker of exactly 1.0 makes dis-max equivalent to a sum, so
            // a plain boolean query is used in that case.
            this(tieBreaker != 1.0f, tieBreaker);
        }

        public QueryBuilder(boolean groupDismax, float tieBreaker) {
            this.groupDismax = groupDismax;
            this.tieBreaker = tieBreaker;
        }

        /**
         * Builds one query per field; fields that produce no query are skipped.
         */
        public List<Query> buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
            List<Query> queries = new ArrayList<Query>(fieldNames.size());
            // Iterate entries directly instead of keySet()+get() (one lookup per field).
            for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
                Query query = parseGroup(type.matchQueryType(), entry.getKey(), entry.getValue(), value, minimumShouldMatch);
                if (query != null) {
                    queries.add(query);
                }
            }
            return queries;
        }

        public Query parseGroup(Type type, String field, Float boostValue, Object value, String minimumShouldMatch) throws IOException {
            return parseAndApply(type, field, value, minimumShouldMatch, boostValue);
        }

        /**
         * Combines the per-field queries into a single query.
         * NOTE: the method name keeps its historical misspelling of "combine"
         * because it is public and renaming would break external callers.
         *
         * @return {@code null} when there is nothing to combine
         */
        public Query conbineGrouped(List<? extends Query> groupQuery) {
            if (groupQuery == null || groupQuery.isEmpty()) {
                return null;
            }
            if (groupQuery.size() == 1) {
                return groupQuery.get(0);
            }
            if (groupDismax) {
                DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(tieBreaker);
                for (Query query : groupQuery) {
                    disMaxQuery.add(query);
                }
                return disMaxQuery;
            } else {
                final BooleanQuery booleanQuery = new BooleanQuery();
                for (Query query : groupQuery) {
                    booleanQuery.add(query, BooleanClause.Occur.SHOULD);
                }
                return booleanQuery;
            }
        }

        /** Default blending: defer to the superclass's single-field behavior. */
        public Query blendTerm(Term term, FieldMapper mapper) {
            return MultiMatchQuery.super.blendTermQuery(term, mapper);
        }

        public boolean forceAnalyzeQueryString() {
            return false;
        }
    }

    /**
     * {@code cross_fields} strategy: fields sharing the same analyzer are
     * grouped and their terms are blended into a single {@link BlendedTermQuery};
     * fields without a mapper fall back to the default per-field parsing.
     */
    public class CrossFieldsQueryBuilder extends QueryBuilder {
        // Fields of the analyzer group currently being parsed (null when the
        // group has a single field and no blending is needed).
        private FieldAndMapper[] blendedFields;

        public CrossFieldsQueryBuilder(float tieBreaker) {
            super(false, tieBreaker);
        }

        @Override
        public List<Query> buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
            Map<Analyzer, List<FieldAndMapper>> groups = new HashMap<Analyzer, List<FieldAndMapper>>();
            List<Tuple<String, Float>> missing = new ArrayList<Tuple<String, Float>>();
            for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
                String name = entry.getKey();
                MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(name);
                if (smartNameFieldMappers != null && smartNameFieldMappers.hasMapper()) {
                    Analyzer actualAnalyzer = getAnalyzer(smartNameFieldMappers.mapper(), smartNameFieldMappers);
                    name = smartNameFieldMappers.mapper().names().indexName();
                    if (!groups.containsKey(actualAnalyzer)) {
                        groups.put(actualAnalyzer, new ArrayList<FieldAndMapper>());
                    }
                    Float boost = entry.getValue();
                    boost = boost == null ? Float.valueOf(1.0f) : boost;
                    groups.get(actualAnalyzer).add(new FieldAndMapper(name, smartNameFieldMappers.mapper(), boost));
                } else {
                    // Was a raw Tuple (unchecked warning); now properly parameterized.
                    missing.add(new Tuple<String, Float>(name, entry.getValue()));
                }
            }
            List<Query> queries = new ArrayList<Query>();
            // Unmapped fields: ordinary per-field parsing, no blending possible.
            for (Tuple<String, Float> tuple : missing) {
                Query q = parseGroup(type.matchQueryType(), tuple.v1(), tuple.v2(), value, minimumShouldMatch);
                if (q != null) {
                    queries.add(q);
                }
            }
            for (List<FieldAndMapper> group : groups.values()) {
                if (group.size() > 1) {
                    blendedFields = new FieldAndMapper[group.size()];
                    int i = 0;
                    for (FieldAndMapper fieldAndMapper : group) {
                        blendedFields[i++] = fieldAndMapper;
                    }
                } else {
                    blendedFields = null;
                }
                // Parse against the first field only; blendTerm() expands each
                // term across all fields of the group via blendedFields.
                final FieldAndMapper fieldAndMapper = group.get(0);
                Query q = parseGroup(type.matchQueryType(), fieldAndMapper.field, fieldAndMapper.boost, value, minimumShouldMatch);
                if (q != null) {
                    queries.add(q);
                }
            }
            return queries.isEmpty() ? null : queries;
        }

        @Override
        public boolean forceAnalyzeQueryString() {
            return blendedFields != null;
        }

        @Override
        public Query blendTerm(Term term, FieldMapper mapper) {
            if (blendedFields == null) {
                return super.blendTerm(term, mapper);
            }
            final Term[] terms = new Term[blendedFields.length];
            float[] blendedBoost = new float[blendedFields.length];
            for (int i = 0; i < blendedFields.length; i++) {
                terms[i] = blendedFields[i].newTerm(term.text());
                blendedBoost[i] = blendedFields[i].boost;
            }
            if (commonTermsCutoff != null) {
                return BlendedTermQuery.commonTermsBlendedQuery(terms, blendedBoost, false, commonTermsCutoff);
            }
            if (tieBreaker == 1.0f) {
                return BlendedTermQuery.booleanBlendedQuery(terms, blendedBoost, false);
            }
            return BlendedTermQuery.dismaxBlendedQuery(terms, blendedBoost, tieBreaker);
        }
    }

    @Override
    protected Query blendTermQuery(Term term, FieldMapper mapper) {
        if (queryBuilder == null) {
            return super.blendTermQuery(term, mapper);
        }
        return queryBuilder.blendTerm(term, mapper);
    }

    /** Immutable (field name, mapper, boost) triple used for cross-field blending. */
    private static final class FieldAndMapper {
        final String field;
        final FieldMapper mapper;
        final float boost;

        private FieldAndMapper(String field, FieldMapper mapper, float boost) {
            this.field = field;
            this.mapper = mapper;
            this.boost = boost;
        }

        /**
         * Creates a term for this field from the raw text, preferring the
         * mapper's indexed representation when the value parses.
         */
        public Term newTerm(String value) {
            try {
                final BytesRef bytesRef = mapper.indexedValueForSearch(value);
                return new Term(field, bytesRef);
            } catch (Exception ex) {
                // we can't parse it just use the incoming value -- it will
                // just have a DF of 0 at the end of the day and will be ignored
            }
            return new Term(field, value);
        }
    }

    protected boolean forceAnalyzeQueryString() {
        return this.queryBuilder.forceAnalyzeQueryString();
    }
}
/*
 * Copyright 2015-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.lastaflute.core.json.engine;

import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;

import org.dbflute.util.DfCollectionUtil;
import org.dbflute.util.DfReflectionUtil;
import org.lastaflute.core.json.JsonMappingOption;
import org.lastaflute.core.json.JsonMappingOption.JsonFieldNaming;
import org.lastaflute.core.json.adapter.BooleanGsonAdaptable;
import org.lastaflute.core.json.adapter.CollectionGsonAdaptable;
import org.lastaflute.core.json.adapter.DBFluteGsonAdaptable;
import org.lastaflute.core.json.adapter.Java8TimeGsonAdaptable;
import org.lastaflute.core.json.adapter.NumberGsonAdaptable;
import org.lastaflute.core.json.adapter.StringGsonAdaptable;
import org.lastaflute.core.json.bind.JsonYourCollectionResource;
import org.lastaflute.core.json.bind.JsonYourScalarResource;
import org.lastaflute.core.json.filter.JsonUnifiedTextReadingFilter;

import com.google.gson.FieldNamingPolicy;
import com.google.gson.FieldNamingStrategy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.internal.ConstructorConstructor;
import com.google.gson.internal.Excluder;
import com.google.gson.internal.bind.JsonAdapterAnnotationTypeAdapterFactory;
import com.google.gson.internal.bind.LaReflectiveTypeAdapterFactory;
import com.google.gson.internal.bind.LaYourCollectionTypeAdapterFactory;
import com.google.gson.internal.bind.LaYourScalarTypeAdapterFactory;
import com.google.gson.internal.bind.ReflectiveTypeAdapterFactory;

/**
 * Gson-backed implementation of the JSON engine. Configures a {@link Gson}
 * instance from a {@link JsonMappingOption} (custom adapters, field naming,
 * your-collection/your-scalar types) and then, via reflection, swaps Gson's
 * internal {@link ReflectiveTypeAdapterFactory} for the LastaFlute variant.
 * NOTE(review): depends on Gson internal classes and private field names
 * ("factories", "constructorConstructor", "jsonAdapterFactory"), so it is
 * tied to a specific Gson version.
 * @author jflute
 */
public class GsonJsonEngine implements RealJsonEngine // adapters here
        , StringGsonAdaptable, NumberGsonAdaptable, Java8TimeGsonAdaptable, BooleanGsonAdaptable // basic
        , CollectionGsonAdaptable, DBFluteGsonAdaptable {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    protected final JsonMappingOption option; // not null
    protected final Gson gson; // not null

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    /**
     * @param oneArgLambda caller's raw {@link GsonBuilder} settings, applied last
     * @param opLambda caller's mapping-option settings, applied first
     */
    public GsonJsonEngine(Consumer<GsonBuilder> oneArgLambda, Consumer<JsonMappingOption> opLambda) {
        option = createOption(opLambda); // should be before creating Gson
        gson = createGson(oneArgLambda); // using option variable
    }

    // -----------------------------------------------------
    //                                        Mapping Option
    //                                        --------------
    protected JsonMappingOption createOption(Consumer<JsonMappingOption> opLambda) {
        final JsonMappingOption option = newJsonMappingOption();
        opLambda.accept(option);
        return option;
    }

    protected JsonMappingOption newJsonMappingOption() {
        return new JsonMappingOption();
    }

    // -----------------------------------------------------
    //                                           Gson Object
    //                                           -----------
    /**
     * Builds the Gson instance: default settings, subclass hook, caller
     * settings, then the reflective factory switch on the created instance.
     */
    protected Gson createGson(Consumer<GsonBuilder> settings) {
        final GsonBuilder builder = newGsonBuilder();
        setupDefaultSettings(builder);
        setupYourSettings(builder);
        acceptGsonSettings(settings, builder);
        final Gson newGson = builder.create();
        switchFactories(newGson);
        return newGson;
    }

    protected GsonBuilder newGsonBuilder() {
        return new GsonBuilder();
    }

    // -----------------------------------------------------
    //                                      Default Settings
    //                                      ----------------
    // Registration order matters: later factories see earlier ones already in place.
    protected void setupDefaultSettings(GsonBuilder builder) {
        registerStringAdapter(builder);
        registerNumberAdapter(builder);
        registerJava8TimeAdapter(builder);
        registerBooleanAdapter(builder);
        registerCollectionAdapter(builder);
        registerDBFluteAdapter(builder);
        registerUtilDateFormat(builder);
        setupFieldPolicy(builder);
        setupYourCollectionSettings(builder);
        setupYourScalarSettings(builder);
        setupYourUltimateSettings(builder);
    }

    protected void registerStringAdapter(GsonBuilder builder) {
        builder.registerTypeAdapterFactory(createStringTypeAdapterFactory());
    }

    protected void registerNumberAdapter(GsonBuilder builder) { // to show property path in exception message
        createNumberFactoryList().forEach(factory -> builder.registerTypeAdapterFactory(factory));
    }

    protected void registerJava8TimeAdapter(GsonBuilder builder) { // until supported by Gson
        builder.registerTypeAdapterFactory(createDateTimeTypeAdapterFactory());
    }

    protected void registerBooleanAdapter(GsonBuilder builder) { // to adjust boolean expression flexibly
        builder.registerTypeAdapterFactory(createBooleanTypeAdapterFactory());
    }

    protected void registerCollectionAdapter(GsonBuilder builder) { // for option of list-null-to-empty
        builder.registerTypeAdapterFactory(createCollectionTypeAdapterFactory());
    }

    protected void registerDBFluteAdapter(GsonBuilder builder) {
        builder.registerTypeAdapterFactory(createClassificationTypeAdapterFactory());
    }

    protected void registerUtilDateFormat(GsonBuilder builder) {
        builder.setDateFormat("yyyy-MM-dd'T'HH:mm:ss"); // same as local date-time
    }

    // -----------------------------------------------------
    //                                          Field Policy
    //                                          ------------
    protected void setupFieldPolicy(GsonBuilder builder) {
        final JsonFieldNaming naming = option.getFieldNaming().orElse(getDefaultFieldNaming());
        builder.setFieldNamingPolicy(deriveFieldNamingPolicy(naming));
    }

    protected JsonFieldNaming getDefaultFieldNaming() {
        return JsonFieldNaming.IDENTITY; // as default
    }

    /**
     * Maps the LastaFlute naming enum to Gson's policy; only the two known
     * values are supported, anything else is a programming error.
     */
    protected FieldNamingPolicy deriveFieldNamingPolicy(JsonFieldNaming naming) {
        final FieldNamingPolicy policy;
        if (naming == JsonFieldNaming.IDENTITY) {
            policy = FieldNamingPolicy.IDENTITY;
        } else if (naming == JsonFieldNaming.CAMEL_TO_LOWER_SNAKE) {
            policy = FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES;
        } else {
            throw new IllegalStateException("Unknown field naming: " + naming);
        }
        return policy;
    }

    // -----------------------------------------------------
    //                                       Your Collection
    //                                       ---------------
    protected void setupYourCollectionSettings(GsonBuilder builder) {
        final List<JsonYourCollectionResource> yourCollections = option.getYourCollections();
        for (JsonYourCollectionResource resource : yourCollections) {
            builder.registerTypeAdapterFactory(createYourCollectionTypeAdapterFactory(resource));
        }
    }

    protected LaYourCollectionTypeAdapterFactory createYourCollectionTypeAdapterFactory(JsonYourCollectionResource resource) {
        return newLaYourCollectionTypeAdapterFactory(resource.getYourType(), resource.getYourCollectionCreator());
    }

    protected LaYourCollectionTypeAdapterFactory newLaYourCollectionTypeAdapterFactory(Class<?> yourType,
            Function<Collection<? extends Object>, Iterable<? extends Object>> yourCollectionCreator) {
        return new LaYourCollectionTypeAdapterFactory(yourType, yourCollectionCreator);
    }

    // -----------------------------------------------------
    //                                           Your Scalar
    //                                           -----------
    protected void setupYourScalarSettings(GsonBuilder builder) {
        final List<JsonYourScalarResource> yourScalars = option.getYourScalars();
        for (JsonYourScalarResource resource : yourScalars) {
            builder.registerTypeAdapterFactory(createYourScalarTypeAdapterFactory(resource));
        }
    }

    protected LaYourScalarTypeAdapterFactory<Object> createYourScalarTypeAdapterFactory(JsonYourScalarResource resource) {
        // forcedly fitting, factory needs generic type
        @SuppressWarnings("unchecked")
        final Class<Object> yourType = (Class<Object>) resource.getYourType();
        @SuppressWarnings("unchecked")
        final Function<String, Object> reader = (Function<String, Object>) resource.getReader();
        @SuppressWarnings("unchecked")
        final Function<Object, String> writer = (Function<Object, String>) resource.getWriter();
        final JsonUnifiedTextReadingFilter readingFilter = JsonUnifiedTextReadingFilter.unify(option); // null allowed
        final Predicate<Class<?>> emptyToNullReadingDeterminer = option.getEmptyToNullReadingDeterminer().orElse(null);
        final Predicate<Class<?>> nullToEmptyWritingDeterminer = option.getNullToEmptyWritingDeterminer().orElse(null);
        return new LaYourScalarTypeAdapterFactory<Object>(yourType, reader, writer // scalar type, main function
                , readingFilter, emptyToNullReadingDeterminer, nullToEmptyWritingDeterminer // as basic option
        );
    }

    // Last hook in default settings: lets the option touch the builder directly.
    protected void setupYourUltimateSettings(GsonBuilder builder) {
        option.getYourUltimateCustomizer().ifPresent(customizer -> {
            customizer.accept(builder);
        });
    }

    // -----------------------------------------------------
    //                                         Your Settings
    //                                         -------------
    protected void setupYourSettings(GsonBuilder builder) { // you can override
    }

    // -----------------------------------------------------
    //                                         Gson Settings
    //                                         -------------
    protected void acceptGsonSettings(Consumer<GsonBuilder> settings, GsonBuilder builder) {
        settings.accept(builder);
    }

    // -----------------------------------------------------
    //                                      Dangerous Switch
    //                                      ----------------
    /**
     * Replaces Gson's internal ReflectiveTypeAdapterFactory with the LastaFlute
     * one by reflecting into the private "factories" list of the created Gson.
     * The rebuilt list is made unmodifiable, matching Gson's own behavior.
     */
    protected void switchFactories(Gson newGson) {
        final Field factoriesField = DfReflectionUtil.getWholeField(newGson.getClass(), "factories");
        @SuppressWarnings("unchecked")
        final List<Object> factories = (List<Object>) DfReflectionUtil.getValueForcedly(factoriesField, newGson);
        final List<Object> filtered = new ArrayList<Object>();
        for (Object factory : factories) {
            if (factory instanceof ReflectiveTypeAdapterFactory) { // switched, only one time
                filtered.add(createReflectiveTypeAdapterFactory(newGson, factory));
            } else {
                filtered.add(factory);
            }
        }
        DfReflectionUtil.setValueForcedly(factoriesField, newGson, Collections.unmodifiableList(filtered));
    }

    /**
     * Builds the replacement factory, harvesting the collaborators the original
     * factory and the Gson instance already hold.
     */
    protected LaReflectiveTypeAdapterFactory createReflectiveTypeAdapterFactory(Gson newGson, Object factory) {
        final ConstructorConstructor constructorConstructor = getConstructorConstructor(factory);
        final JsonAdapterAnnotationTypeAdapterFactory jsonAdapterFactory = getJsonAdapterFactory(factory);
        final FieldNamingStrategy fieldNamingStrategy = newGson.fieldNamingStrategy();
        final Excluder excluder = newGson.excluder();
        return new LaReflectiveTypeAdapterFactory(constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory);
    }

    // Reads the private "constructorConstructor" field of the original factory.
    protected ConstructorConstructor getConstructorConstructor(Object factory) {
        final Field field = DfReflectionUtil.getWholeField(factory.getClass(), "constructorConstructor");
        return (ConstructorConstructor) DfReflectionUtil.getValueForcedly(field, factory);
    }

    // Reads the private "jsonAdapterFactory" field of the original factory.
    protected JsonAdapterAnnotationTypeAdapterFactory getJsonAdapterFactory(Object factory) {
        final Field field = DfReflectionUtil.getWholeField(factory.getClass(), "jsonAdapterFactory");
        return (JsonAdapterAnnotationTypeAdapterFactory) DfReflectionUtil.getValueForcedly(field, factory);
    }

    // ===================================================================================
    //                                                                      JSON Interface
    //                                                                      ==============
    /**
     * Deserializes to the bean type; an empty JSON yields a new-only instance
     * instead of null so callers never receive null.
     */
    @Override
    public <BEAN> BEAN fromJson(String json, Class<BEAN> beanType) { // are not null, already checked
        final BEAN bean = gson.fromJson(json, beanType); // if empty JSON, new-only instance
        return bean != null ? bean : newEmptyInstance(beanType);
    }

    /**
     * Deserializes to a parameterized type; for empty JSON, List/Map raw types
     * get empty-but-mutable collections, anything else a new-only instance.
     * (Method name keeps its historical spelling: part of the interface.)
     */
    @Override
    public <BEAN> BEAN fromJsonParameteried(String json, ParameterizedType parameterizedType) {
        final BEAN bean = gson.fromJson(json, parameterizedType); // if empty JSON, new-only instance
        if (bean != null) {
            return bean;
        } else { // e.g. empty string JSON
            final Class<?> rawClass = DfReflectionUtil.getRawClass(parameterizedType.getRawType()); // null allowed?
            if (List.class.equals(rawClass)) {
                @SuppressWarnings("unchecked")
                final BEAN emptyList = (BEAN) DfCollectionUtil.newArrayListSized(2); // empty but mutable for coherence
                return emptyList;
            } else if (Map.class.equals(rawClass)) {
                @SuppressWarnings("unchecked")
                final BEAN emptyList = (BEAN) DfCollectionUtil.newHashMapSized(2); // empty but mutable for coherence
                return emptyList;
            } else {
                return newEmptyInstance(parameterizedType);
            }
        }
    }

    @SuppressWarnings("unchecked")
    protected <BEAN> BEAN newEmptyInstance(ParameterizedType parameterizedType) {
        final Class<?> rawClass = DfReflectionUtil.getRawClass(parameterizedType);
        if (rawClass == null) {
            throw new IllegalStateException("Cannot get raw type from the parameterized type: " + parameterizedType);
        }
        return (BEAN) newEmptyInstance(rawClass);
    }

    @SuppressWarnings("unchecked")
    protected <BEAN> BEAN newEmptyInstance(Class<BEAN> beanType) {
        return (BEAN) DfReflectionUtil.newInstance(beanType);
    }

    @Override
    public String toJson(Object bean) { // is not null, already checked
        return gson.toJson(bean);
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    @Override
    public JsonMappingOption getGsonOption() {
        return option;
    }
}
package lebah.portal;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Hashtable;

import javax.portlet.GenericPortlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletMode;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.WindowState;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

import lebah.portal.ClassLoadManager;
import lebah.portal.ErrorMsg;
import lebah.portal.MerakConfig;
import lebah.portal.MerakContext;
import lebah.portal.MerakRequest;
import lebah.portal.MerakResponse;
import lebah.portal.ModulePopWindow;
import lebah.portal.PCDeviceController;
import lebah.portal.PortletInfo;
import lebah.portal.db.CustomClass;
import lebah.portal.velocity.VServlet;
import lebah.portal.velocity.VTemplate;
import lebah.util.Util;

/**
 * Front controller for portal modules: resolves the requested module from the
 * URL path, loads its class (a VTemplate page or a JSR-168 GenericPortlet) and
 * renders it inside a common HTML shell.
 */
public class AppController extends VServlet {

    /** GET is handled exactly like POST. */
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        doPost(req, res);
    }

    public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        HttpSession session = req.getSession();
        // =============================
        // INITIALIZING VELOCITY CONTEXT
        // =============================
        // Velocity context/engine are cached per session and lazily initialized.
        //synchronized(this) {
        context = (org.apache.velocity.VelocityContext) session.getAttribute("VELOCITY_CONTEXT");
        engine = (org.apache.velocity.app.VelocityEngine) session.getAttribute("VELOCITY_ENGINE");
        if (context == null || engine == null) {
            if (context == null) {
                System.out.println("[ControllerServlet2] Velocity context is Null.");
            }
            if (engine == null) {
                System.out.println("[ControllerServlet2] Velocity engine is Null.");
            }
            initVelocity(getServletConfig());
            session.setAttribute("VELOCITY_CONTEXT", context);
            session.setAttribute("VELOCITY_ENGINE", engine);
        }
        //}
        context.put("util", new Util());
        //LABELS Properties
        //context.put("label", lebah.db.Labels.getInstance().getTitles());
        String userAgent = req.getHeader("User-Agent");
        context.put("userAgent", userAgent);
        // BUGFIX: the User-Agent header is optional; guard against null before
        // the indexOf() chain, which previously threw a NullPointerException.
        if ( userAgent != null ) {
            if ( userAgent.indexOf("MSIE") > 0 ) {
                context.put("browser", "ie");
            } else if ( userAgent.indexOf("Firefox") > 0 ) {
                context.put("browser", "firefox");
            } else if ( userAgent.indexOf("Netscape") > 0 ) {
                context.put("browser", "netscape");
            } else if ( userAgent.indexOf("Safari") > 0 ) {
                context.put("browser", "safari");
            } else if ( userAgent.indexOf("MIDP") > 0 ) {
                context.put("browser", "midp");
            }
        }
        doService(getServletContext(), getServletConfig(), engine, context, session, req, res);
    }

    /**
     * Renders the HTML shell and the requested module.
     * Session attributes written here (_portal_*) are read by modules later.
     */
    public void doService(ServletContext servletContext, ServletConfig servletConfig, VelocityEngine engine, VelocityContext context,
            HttpSession session, HttpServletRequest req, HttpServletResponse res) throws IOException {
        PrintWriter out = res.getWriter();
        res.setContentType("text/html");
        String app_path = servletContext.getRealPath("/"); //getServletContext().getRealPath("/");
        app_path = app_path.replace('\\', '/');
        session.setAttribute("_portal_app_path", app_path);
        String _portal_login = (String) session.getAttribute("_portal_login");
        if ( _portal_login == null || "".equals(_portal_login)) {
            session.setAttribute("_portal_login", "none");
        }
        String serverName = req.getServerName();
        int serverPort = req.getServerPort();
        session.setAttribute("_portal_server", serverPort != 80 ? serverName + ":" + serverPort : serverName );
        context.put("server", serverPort != 80 ? serverName + ":" + serverPort : serverName);
        String uri = req.getRequestURI();
        String s1 = uri.substring(1);
        context.put("appname", s1.substring(0, s1.indexOf("/")));
        session.setAttribute("_portal_appname", s1.substring(0, s1.indexOf("/")));
        //get pathinfo
        String pathInfo = req.getPathInfo();
        String queryString = req.getQueryString();
        context.put("queryString", queryString);
        // BUGFIX: getPathInfo() may return null; the original null check sat
        // *after* substring() had already been called and could never fire.
        if ( pathInfo == null ) {
            pathInfo = "/";
        }
        pathInfo = pathInfo.substring(1); //get rid of the first '/'
        String className = "";
        //pathInfo only contains action
        pathInfo = pathInfo.substring(pathInfo.indexOf("/") + 1);
        String module = pathInfo != null ? pathInfo : ""; //module
        session.setAttribute("_portal_action", module);
        session.setAttribute("_portal_module", module);
        context.put("session", session);
        String ddir = "../";
        context.put("relativeDir", ddir);
        // out.println("<html>");
        // out.println("<title>eduCATE</title>");
        //**HTML
        out.println("<html>");
        out.println("<title>");
        //**HTML
        //TITLE
        Title cTitle = new Title(engine, context, req, res);
        try {
            cTitle.print();
        } catch ( Exception ex ) {
            ex.printStackTrace();
        }
        //**HTML
        out.println("</title>");
        out.println("<link href=\"../styles.css\" rel=\"stylesheet\" type=\"text/css\">");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/misc.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../dropdown.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../fck/fckeditor.js\"></script>");
        out.println("<script type=\"text/javascript\" src=\"../fck/createFCKEditor.js\"></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/prototype.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/scriptaculous.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/fixed.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/dragdrop.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/unittest.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/ajax.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/effects.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/EventDispatcher.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../scriptaculous/FishEye.js\" ></script>");
        out.println("<script type=\"text/javascript\" src=\"../d3/d3.min.js\" charset=\"utf-8\"></script>");
        //js_css
        JS_CSS js_css = new JS_CSS(engine, context, req, res);
        try {
            js_css.print();
        } catch ( Exception ex ) {
            ex.printStackTrace();
        }
        out.println("<body leftmargin=\"0\" rightmargin=\"0\" topmargin=\"0\">");
        // Local access (loopback) bypasses the role-based class lookup below.
        boolean localAccess = false;
        if ( "127.0.0.1".equals(req.getRemoteAddr()) ) localAccess = true;
        if ( !"".equals(module) ) {
            try {
                Object content = null;
                try {
                    String role = (String) session.getAttribute("_portal_role");
                    if ( !localAccess ) {
                        className = CustomClass.getName(module, role);
                        content = ClassLoadManager.load(className, module, req.getRequestedSessionId());
                    } else {
                        className = module;
                        content = ClassLoadManager.load(className, module, "");
                    }
                    if ( content == null ) {
                        content = new ErrorMsg(engine, context, req, res);
                        ((ErrorMsg) content).setError("No privillege for " + module + " on role " + role + ", or this module is not registered.");
                        ((VTemplate) content).print(session);
                    } else if ( content instanceof GenericPortlet ) {
                        // JSR-168 portlet: build a minimal render environment and delegate.
                        PortletInfo portletInfo = new PortletInfo();
                        portletInfo.id = "test_id";
                        portletInfo.title = "Test Title";
                        Hashtable<String, Object> portletState = getPortletState(servletConfig, req, res, out, portletInfo);
                        RenderRequest renderRequest = (RenderRequest) portletState.get("renderRequest");
                        RenderResponse renderResponse = (RenderResponse) portletState.get("renderResponse");
                        PortletConfig config = (PortletConfig) portletState.get("config");
                        GenericPortlet portlet = (GenericPortlet) content;
                        portlet.init(config);
                        portlet.render(renderRequest, renderResponse);
                    } else {
                        // Plain VTemplate page: wire it to this request's environment.
                        ((VTemplate) content).setEnvironment(engine, context, req, res);
                        ((VTemplate) content).setServletContext(servletConfig.getServletContext());
                        ((VTemplate) content).setServletConfig(servletConfig);
                        ((VTemplate) content).setId(module);
                        ((VTemplate) content).setDiv(false);
                        try {
                            if ( content != null ) {
                                ((VTemplate) content).setShowVM(true);
                                ((VTemplate) content).print(session);
                            }
                        } catch ( Exception ex ) {
                            out.println( ex.getMessage() );
                        }
                    }
                } catch ( ClassNotFoundException cnfex ) {
                    content = new ErrorMsg(engine, context, req, res);
                    ((ErrorMsg) content).setError("ClassNotFoundException : " + cnfex.getMessage());
                    ((VTemplate) content).print(session);
                } catch ( InstantiationException iex ) {
                    content = new ErrorMsg(engine, context, req, res);
                    ((ErrorMsg) content).setError("InstantiationException : " + iex.getMessage());
                    ((VTemplate) content).print(session);
                } catch ( IllegalAccessException illex ) {
                    content = new ErrorMsg(engine, context, req, res);
                    ((ErrorMsg) content).setError("IllegalAccessException : " + illex.getMessage());
                    ((VTemplate) content).print(session);
                } catch ( Exception ex ) {
                    content = new ErrorMsg(engine, context, req, res);
                    ((ErrorMsg) content).setError("Other Exception during class initiation : " + ex.getMessage());
                    ((VTemplate) content).print(session);
                    ex.printStackTrace();
                }
            } catch ( Exception ex ) {
                System.out.println( ex.getMessage() );
            } finally {
                //long totalMem = Runtime.getRuntime().totalMemory();
                //System.out.println("total memory = " + totalMem);
            }
        }
        out.println("</body>");
        out.println("</html>");
    }

    /**
     * Builds the render request/response/config triple used to drive a
     * JSR-168 portlet. Keys: "renderRequest", "renderResponse", "config".
     */
    private static Hashtable<String, Object> getPortletState(ServletConfig svtCfg, HttpServletRequest req, HttpServletResponse res,
            PrintWriter out, PortletInfo portletInfo) throws Exception {
        Hashtable<String, Object> h = new Hashtable<String, Object>();
        MerakContext context = new MerakContext();
        context.httpServletRequest = req;
        MerakConfig config = new MerakConfig();
        config.portletInfo = portletInfo;
        config.portletContext = context;
        MerakResponse renderResponse = new MerakResponse();
        MerakRequest renderRequest = new MerakRequest();
        renderRequest.windowState = WindowState.NORMAL;
        renderRequest.portletMode = PortletMode.VIEW;
        renderResponse.printWriter = out;
        renderRequest.httpServletRequest = req;
        renderResponse.httpServletResponse = res;
        h.put("renderRequest", renderRequest);
        h.put("renderResponse", renderResponse);
        h.put("config", config);
        return h;
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server.coordinator;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.Objects;
import java.util.Set;

/**
 * Runtime-tunable coordinator settings, deserialized from JSON (stored under
 * {@link #CONFIG_KEY}). Instances are immutable value objects; use
 * {@link Builder} to construct one in code.
 *
 * <p>NOTE(review): {@code killDataSourceWhitelist} is stored and returned
 * without a defensive copy, so a caller that mutates the set it passed in (or
 * the set returned by the getter) mutates this config — preserved here to
 * avoid changing observable behavior, but worth confirming upstream.
 */
public class CoordinatorDynamicConfig
{
  /** Key under which this config is persisted/looked up. */
  public static final String CONFIG_KEY = "coordinator.config";

  private final long millisToWaitBeforeDeleting;
  private final long mergeBytesLimit;
  private final int mergeSegmentsLimit;
  private final int maxSegmentsToMove;
  private final int replicantLifetime;
  private final int replicationThrottleLimit;
  private final int balancerComputeThreads;
  private final boolean emitBalancingStats;
  private final Set<String> killDataSourceWhitelist;

  /**
   * JSON creator. All numeric fields are taken as-is except
   * {@code balancerComputeThreads}, which is clamped to a minimum of 1 so a
   * zero/negative value in stored config cannot disable the balancer pool.
   *
   * @param millisToWaitBeforeDeleting grace period before deleting unused segments
   * @param mergeBytesLimit            max total bytes considered per merge
   * @param mergeSegmentsLimit         max segments considered per merge
   * @param maxSegmentsToMove          max segments the balancer moves per run
   * @param replicantLifetime          runs a replicant may stay in the load queue
   * @param replicationThrottleLimit   max concurrent replica loads
   * @param balancerComputeThreads     threads for balancer cost computation (min 1)
   * @param emitBalancingStats         whether to emit balancing cost metrics
   * @param killDataSourceWhitelist    datasources eligible for kill tasks (may be null)
   */
  @JsonCreator
  public CoordinatorDynamicConfig(
      @JsonProperty("millisToWaitBeforeDeleting") long millisToWaitBeforeDeleting,
      @JsonProperty("mergeBytesLimit") long mergeBytesLimit,
      @JsonProperty("mergeSegmentsLimit") int mergeSegmentsLimit,
      @JsonProperty("maxSegmentsToMove") int maxSegmentsToMove,
      @JsonProperty("replicantLifetime") int replicantLifetime,
      @JsonProperty("replicationThrottleLimit") int replicationThrottleLimit,
      @JsonProperty("balancerComputeThreads") int balancerComputeThreads,
      @JsonProperty("emitBalancingStats") boolean emitBalancingStats,
      @JsonProperty("killDataSourceWhitelist") Set<String> killDataSourceWhitelist
  )
  {
    this.maxSegmentsToMove = maxSegmentsToMove;
    this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
    this.mergeSegmentsLimit = mergeSegmentsLimit;
    this.mergeBytesLimit = mergeBytesLimit;
    this.replicantLifetime = replicantLifetime;
    this.replicationThrottleLimit = replicationThrottleLimit;
    this.emitBalancingStats = emitBalancingStats;
    // Guard against a stored 0 (or negative) thread count.
    this.balancerComputeThreads = Math.max(balancerComputeThreads, 1);
    this.killDataSourceWhitelist = killDataSourceWhitelist;
  }

  @JsonProperty
  public long getMillisToWaitBeforeDeleting()
  {
    return millisToWaitBeforeDeleting;
  }

  @JsonProperty
  public long getMergeBytesLimit()
  {
    return mergeBytesLimit;
  }

  @JsonProperty
  public boolean emitBalancingStats()
  {
    return emitBalancingStats;
  }

  @JsonProperty
  public int getMergeSegmentsLimit()
  {
    return mergeSegmentsLimit;
  }

  @JsonProperty
  public int getMaxSegmentsToMove()
  {
    return maxSegmentsToMove;
  }

  @JsonProperty
  public int getReplicantLifetime()
  {
    return replicantLifetime;
  }

  @JsonProperty
  public int getReplicationThrottleLimit()
  {
    return replicationThrottleLimit;
  }

  @JsonProperty
  public int getBalancerComputeThreads()
  {
    return balancerComputeThreads;
  }

  /** May return null (never normalized to an empty set). */
  @JsonProperty
  public Set<String> getKillDataSourceWhitelist()
  {
    return killDataSourceWhitelist;
  }

  @Override
  public String toString()
  {
    return "CoordinatorDynamicConfig{" +
           "millisToWaitBeforeDeleting=" + millisToWaitBeforeDeleting +
           ", mergeBytesLimit=" + mergeBytesLimit +
           ", mergeSegmentsLimit=" + mergeSegmentsLimit +
           ", maxSegmentsToMove=" + maxSegmentsToMove +
           ", replicantLifetime=" + replicantLifetime +
           ", replicationThrottleLimit=" + replicationThrottleLimit +
           ", balancerComputeThreads=" + balancerComputeThreads +
           ", emitBalancingStats=" + emitBalancingStats +
           ", killDataSourceWhitelist=" + killDataSourceWhitelist +
           '}';
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    CoordinatorDynamicConfig that = (CoordinatorDynamicConfig) o;

    if (millisToWaitBeforeDeleting != that.millisToWaitBeforeDeleting) {
      return false;
    }
    if (mergeBytesLimit != that.mergeBytesLimit) {
      return false;
    }
    if (mergeSegmentsLimit != that.mergeSegmentsLimit) {
      return false;
    }
    if (maxSegmentsToMove != that.maxSegmentsToMove) {
      return false;
    }
    if (replicantLifetime != that.replicantLifetime) {
      return false;
    }
    if (replicationThrottleLimit != that.replicationThrottleLimit) {
      return false;
    }
    if (balancerComputeThreads != that.balancerComputeThreads) {
      return false;
    }
    if (emitBalancingStats != that.emitBalancingStats) {
      return false;
    }
    // Null-safe set comparison (replaces the hand-rolled ternary null dance).
    return Objects.equals(killDataSourceWhitelist, that.killDataSourceWhitelist);
  }

  @Override
  public int hashCode()
  {
    int result = (int) (millisToWaitBeforeDeleting ^ (millisToWaitBeforeDeleting >>> 32));
    result = 31 * result + (int) (mergeBytesLimit ^ (mergeBytesLimit >>> 32));
    result = 31 * result + mergeSegmentsLimit;
    result = 31 * result + maxSegmentsToMove;
    result = 31 * result + replicantLifetime;
    result = 31 * result + replicationThrottleLimit;
    result = 31 * result + balancerComputeThreads;
    result = 31 * result + (emitBalancingStats ? 1 : 0);
    result = 31 * result + (killDataSourceWhitelist != null ? killDataSourceWhitelist.hashCode() : 0);
    return result;
  }

  /**
   * Fluent builder. Defaults mirror the coordinator's documented defaults
   * (15 min delete wait, 500MB merge limit, etc.).
   */
  public static class Builder
  {
    private long millisToWaitBeforeDeleting;
    private long mergeBytesLimit;
    private int mergeSegmentsLimit;
    private int maxSegmentsToMove;
    private int replicantLifetime;
    private int replicationThrottleLimit;
    private boolean emitBalancingStats;
    private int balancerComputeThreads;
    private Set<String> killDataSourceWhitelist;

    public Builder()
    {
      this(15 * 60 * 1000L, 524288000L, 100, 5, 15, 10, 1, false, null);
    }

    private Builder(
        long millisToWaitBeforeDeleting,
        long mergeBytesLimit,
        int mergeSegmentsLimit,
        int maxSegmentsToMove,
        int replicantLifetime,
        int replicationThrottleLimit,
        int balancerComputeThreads,
        boolean emitBalancingStats,
        Set<String> killDataSourceWhitelist
    )
    {
      this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
      this.mergeBytesLimit = mergeBytesLimit;
      this.mergeSegmentsLimit = mergeSegmentsLimit;
      this.maxSegmentsToMove = maxSegmentsToMove;
      this.replicantLifetime = replicantLifetime;
      this.replicationThrottleLimit = replicationThrottleLimit;
      this.emitBalancingStats = emitBalancingStats;
      this.balancerComputeThreads = balancerComputeThreads;
      this.killDataSourceWhitelist = killDataSourceWhitelist;
    }

    public Builder withMillisToWaitBeforeDeleting(long millisToWaitBeforeDeleting)
    {
      this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
      return this;
    }

    public Builder withMergeBytesLimit(long mergeBytesLimit)
    {
      this.mergeBytesLimit = mergeBytesLimit;
      return this;
    }

    public Builder withMergeSegmentsLimit(int mergeSegmentsLimit)
    {
      this.mergeSegmentsLimit = mergeSegmentsLimit;
      return this;
    }

    public Builder withMaxSegmentsToMove(int maxSegmentsToMove)
    {
      this.maxSegmentsToMove = maxSegmentsToMove;
      return this;
    }

    public Builder withReplicantLifetime(int replicantLifetime)
    {
      this.replicantLifetime = replicantLifetime;
      return this;
    }

    public Builder withReplicationThrottleLimit(int replicationThrottleLimit)
    {
      this.replicationThrottleLimit = replicationThrottleLimit;
      return this;
    }

    public Builder withBalancerComputeThreads(int balancerComputeThreads)
    {
      this.balancerComputeThreads = balancerComputeThreads;
      return this;
    }

    /**
     * FIX: this setter was missing, so {@code emitBalancingStats} could never
     * be enabled through the builder (only via JSON deserialization).
     */
    public Builder withEmitBalancingStats(boolean emitBalancingStats)
    {
      this.emitBalancingStats = emitBalancingStats;
      return this;
    }

    public Builder withKillDataSourceWhitelist(Set<String> killDataSourceWhitelist)
    {
      this.killDataSourceWhitelist = killDataSourceWhitelist;
      return this;
    }

    public CoordinatorDynamicConfig build()
    {
      return new CoordinatorDynamicConfig(
          millisToWaitBeforeDeleting,
          mergeBytesLimit,
          mergeSegmentsLimit,
          maxSegmentsToMove,
          replicantLifetime,
          replicationThrottleLimit,
          balancerComputeThreads,
          emitBalancingStats,
          killDataSourceWhitelist
      );
    }
  }
}
/* $Id$ */

package com.zoho.books.model;

import org.json.JSONObject;

/**
 * A billing/shipping address as exchanged with the Zoho Books API.
 *
 * <p>String fields default to {@code ""} (never null unless a caller sets
 * null explicitly); {@link #toJSON()} serializes only the fields the API
 * expects, using the original per-field presence rules.
 */
public class Address
{
  private String streetAddress1 = "";
  private String streetAddress2 = "";
  private String address = "";
  private String city = "";
  private String state = "";
  private String country = "";
  private String zip = "";
  private String fax = "";
  private boolean isUpdateCustomer = false;
  private String organizationAddressId;

  /**
   * Set the first street-address line.
   *
   * @param streetAddress1 first line of the street address.
   */
  public void setStreetAddress1(String streetAddress1)
  {
    this.streetAddress1 = streetAddress1;
  }

  /**
   * @return the first street-address line.
   */
  public String getStreetAddress1()
  {
    return streetAddress1;
  }

  /**
   * Set the second street-address line.
   *
   * @param streetAddress2 second line of the street address.
   */
  public void setStreetAddress2(String streetAddress2)
  {
    this.streetAddress2 = streetAddress2;
  }

  /**
   * @return the second street-address line.
   */
  public String getStreetAddress2()
  {
    return streetAddress2;
  }

  /**
   * Set the free-form address text.
   *
   * @param address billing address text.
   */
  public void setAddress(String address)
  {
    this.address = address;
  }

  /**
   * @return the free-form billing address text.
   */
  public String getAddress()
  {
    return address;
  }

  /**
   * Set the city.
   *
   * @param city city of the address.
   */
  public void setCity(String city)
  {
    this.city = city;
  }

  /**
   * @return the city of the address.
   */
  public String getCity()
  {
    return city;
  }

  /**
   * Set the state.
   *
   * @param state state of the address.
   */
  public void setState(String state)
  {
    this.state = state;
  }

  /**
   * @return the state of the address.
   */
  public String getState()
  {
    return state;
  }

  /**
   * Set the country.
   *
   * @param country country of the address.
   */
  public void setCountry(String country)
  {
    this.country = country;
  }

  /**
   * @return the country of the address.
   */
  public String getCountry()
  {
    return country;
  }

  /**
   * Set the zip/postal code.
   *
   * @param zip zip code of the address.
   */
  public void setZip(String zip)
  {
    this.zip = zip;
  }

  /**
   * @return the zip code of the address.
   */
  public String getZip()
  {
    return zip;
  }

  /**
   * Set the fax number.
   *
   * @param fax fax number for the billing address.
   */
  public void setFax(String fax)
  {
    this.fax = fax;
  }

  /**
   * @return the fax number of the billing address.
   */
  public String getFax()
  {
    return fax;
  }

  /**
   * Mark this address as the customer's new default billing address.
   *
   * @param isUpdateCustomer pass true to set this address as the default
   *                         billing address for the customer from now on.
   */
  public void setIsUpdateCustomer(boolean isUpdateCustomer)
  {
    this.isUpdateCustomer = isUpdateCustomer;
  }

  /**
   * @return true if this address should become the customer's default
   *         billing address.
   */
  public boolean isUpdateCustomer()
  {
    return isUpdateCustomer;
  }

  /**
   * Get the organization address id.
   *
   * @return the organization address id.
   */
  public String getOrganizationAddressId()
  {
    return organizationAddressId;
  }

  /**
   * Set the organization address id.
   *
   * @param organizationAddressId ID of the organization address.
   */
  public void setOrganizationAddressId(String organizationAddressId)
  {
    this.organizationAddressId = organizationAddressId;
  }

  /**
   * Serialize this address to the JSON shape expected by the API.
   *
   * <p>Presence rules (kept exactly as before): street lines, fax and
   * organization id are emitted only when non-null and non-empty;
   * address/city/state/country/zip whenever non-null; and
   * {@code is_update_customer} is always emitted.
   *
   * @return a JSONObject representation of this address.
   * @throws Exception if a JSON value cannot be stored.
   */
  public JSONObject toJSON() throws Exception
  {
    JSONObject jsonObject = new JSONObject();

    if (streetAddress1 != null && !streetAddress1.equals("")) {
      jsonObject.put("street_address1", streetAddress1);
    }
    if (streetAddress2 != null && !streetAddress2.equals("")) {
      jsonObject.put("street_address2", streetAddress2);
    }
    if (address != null) {
      jsonObject.put("address", address);
    }
    if (city != null) {
      jsonObject.put("city", city);
    }
    if (state != null) {
      jsonObject.put("state", state);
    }
    if (country != null) {
      jsonObject.put("country", country);
    }
    if (zip != null) {
      jsonObject.put("zip", zip);
    }
    if (fax != null && !fax.equals("")) {
      jsonObject.put("fax", fax);
    }
    // FIX: the original guarded this with "(Boolean)isUpdateCustomer != null",
    // which boxes a primitive and can never be null — the check was dead code.
    // The field is always emitted, exactly as before.
    jsonObject.put("is_update_customer", isUpdateCustomer);
    if (organizationAddressId != null && !organizationAddressId.equals("")) {
      jsonObject.put("organization_address_id", organizationAddressId);
    }

    return jsonObject;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search; import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Deque; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.hdfs.HdfsBasicDistributedZk2Test; import org.apache.solr.cloud.hdfs.HdfsTestUtil; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.update.DirectUpdateHandler2; import org.apache.solr.update.HdfsUpdateLog; import org.apache.solr.update.UpdateHandler; import org.apache.solr.update.UpdateLog; 
import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; import org.apache.solr.util.IOUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.noggit.ObjectBuilder; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; @ThreadLeakScope(Scope.NONE) // hdfs mini cluster currently leaks threads // TODO: longer term this should be combined with TestRecovery somehow ?? public class TestRecoveryHdfs extends SolrTestCaseJ4 { // means that we've seen the leader and have version info (i.e. we are a non-leader replica) private static String FROM_LEADER = DistribPhase.FROMLEADER.toString(); private static int timeout=60; // acquire timeout in seconds. change this to a huge number when debugging to prevent threads from advancing. private static MiniDFSCluster dfsCluster; private static String hdfsUri; private static FileSystem fs; @BeforeClass public static void beforeClass() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().getAbsolutePath()); hdfsUri = dfsCluster.getFileSystem().getUri().toString(); try { URI uri = new URI(hdfsUri); fs = FileSystem.newInstance(uri, new Configuration()); } catch (IOException e) { throw new RuntimeException(e); } catch (URISyntaxException e) { throw new RuntimeException(e); } //hdfsDataDir = hdfsUri + "/solr/shard1"; // System.setProperty("solr.data.dir", hdfsUri + "/solr/shard1"); System.setProperty("solr.ulog.dir", hdfsUri + "/solr/shard1"); initCore("solrconfig-tlog.xml","schema15.xml"); } @AfterClass public static void afterClass() throws Exception { System.clearProperty("solr.ulog.dir"); System.clearProperty("solr.data.dir"); System.clearProperty("test.build.data"); System.clearProperty("test.cache.data"); deleteCore(); IOUtils.closeQuietly(fs); fs = null; HdfsTestUtil.teardownClass(dfsCluster); hdfsDataDir = null; dfsCluster = null; 
} // since we make up fake versions in these tests, we can get messed up by a DBQ with a real version // since Solr can think following updates were reordered. @Override public void clearIndex() { try { deleteByQueryAndGetVersion("*:*", params("_version_", Long.toString(-Long.MAX_VALUE), DISTRIB_UPDATE_PARAM,FROM_LEADER)); } catch (Exception e) { throw new RuntimeException(e); } } @Test public void testLogReplay() throws Exception { try { DirectUpdateHandler2.commitOnClose = false; final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); UpdateLog.testing_logReplayHook = new Runnable() { @Override public void run() { try { assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); } catch (Exception e) { throw new RuntimeException(e); } } }; UpdateLog.testing_logReplayFinishHook = new Runnable() { @Override public void run() { logReplayFinish.release(); } }; clearIndex(); assertU(commit()); Deque<Long> versions = new ArrayDeque<>(); versions.addFirst(addAndGetVersion(sdoc("id", "A1"), null)); versions.addFirst(addAndGetVersion(sdoc("id", "A11"), null)); versions.addFirst(addAndGetVersion(sdoc("id", "A12"), null)); versions.addFirst(deleteByQueryAndGetVersion("id:A11", null)); versions.addFirst(addAndGetVersion(sdoc("id", "A13"), null)); assertJQ(req("q","*:*"),"/response/numFound==0"); assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); h.close(); createCore(); // Solr should kick this off now // h.getCore().getUpdateHandler().getUpdateLog().recoverFromLog(); // verify that previous close didn't do a commit // recovery should be blocked by our hook assertJQ(req("q","*:*") ,"/response/numFound==0"); // make sure we can still access versions after a restart assertJQ(req("qt","/get", "getVersions",""+versions.size()),"/versions==" + versions); // unblock recovery logReplay.release(1000); // make sure we can still access versions during recovery assertJQ(req("qt","/get", 
"getVersions",""+versions.size()),"/versions==" + versions); // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); assertJQ(req("q","*:*") ,"/response/numFound==3"); // make sure we can still access versions after recovery assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); assertU(adoc("id","A2")); assertU(adoc("id","A3")); assertU(delI("A2")); assertU(adoc("id","A4")); assertJQ(req("q","*:*") ,"/response/numFound==3"); h.close(); createCore(); // Solr should kick this off now // h.getCore().getUpdateHandler().getUpdateLog().recoverFromLog(); // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); assertJQ(req("q","*:*") ,"/response/numFound==5"); assertJQ(req("q","id:A2") ,"/response/numFound==0"); // no updates, so insure that recovery does not run h.close(); int permits = logReplay.availablePermits(); createCore(); // Solr should kick this off now // h.getCore().getUpdateHandler().getUpdateLog().recoverFromLog(); assertJQ(req("q","*:*") ,"/response/numFound==5"); Thread.sleep(100); assertEquals(permits, logReplay.availablePermits()); // no updates, so insure that recovery didn't run assertEquals(UpdateLog.State.ACTIVE, h.getCore().getUpdateHandler().getUpdateLog().getState()); } finally { DirectUpdateHandler2.commitOnClose = true; UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; } } @Test public void testBuffering() throws Exception { DirectUpdateHandler2.commitOnClose = false; final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); UpdateLog.testing_logReplayHook = new Runnable() { @Override public void run() { try { assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); } catch (Exception e) { throw new RuntimeException(e); } } }; UpdateLog.testing_logReplayFinishHook = new Runnable() { @Override public void run() { logReplayFinish.release(); } 
}; SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); try { clearIndex(); assertU(commit()); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); Future<UpdateLog.RecoveryInfo> rinfoFuture = ulog.applyBufferedUpdates(); assertTrue(rinfoFuture == null); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); // simulate updates from a leader updateJ(jsonAdd(sdoc("id","B1", "_version_","1010")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","B11", "_version_","1015")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonDelQ("id:B1 id:B11 id:B2 id:B3"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-1017")); updateJ(jsonAdd(sdoc("id","B2", "_version_","1020")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","B3", "_version_","1030")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); deleteAndGetVersion("B1", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-2010")); assertJQ(req("qt","/get", "getVersions","6") ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}" ); assertU(commit()); assertJQ(req("qt","/get", "getVersions","6") ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}" ); // updates should be buffered, so we should not see any results yet. assertJQ(req("q", "*:*") , "/response/numFound==0" ); // real-time get should also not show anything (this could change in the future, // but it's currently used for validating version numbers too, so it would // be bad for updates to be visible if we're just buffering. 
assertJQ(req("qt","/get", "id","B3") ,"=={'doc':null}" ); rinfoFuture = ulog.applyBufferedUpdates(); assertTrue(rinfoFuture != null); assertEquals(UpdateLog.State.APPLYING_BUFFERED, ulog.getState()); logReplay.release(1000); UpdateLog.RecoveryInfo rinfo = rinfoFuture.get(); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); assertJQ(req("qt","/get", "getVersions","6") ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}" ); assertJQ(req("q", "*:*") , "/response/numFound==2" ); // move back to recovering ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); Long ver = getVer(req("qt","/get", "id","B3")); assertEquals(1030L, ver.longValue()); // add a reordered doc that shouldn't overwrite one in the index updateJ(jsonAdd(sdoc("id","B3", "_version_","3")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // reorder two buffered updates updateJ(jsonAdd(sdoc("id","B4", "_version_","1040")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); deleteAndGetVersion("B4", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-940")); // this update should not take affect updateJ(jsonAdd(sdoc("id","B6", "_version_","1060")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","B5", "_version_","1050")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","B8", "_version_","1080")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // test that delete by query is at least buffered along with everything else so it will delete the // currently buffered id:8 (even if it doesn't currently support versioning) updateJ("{\"delete\": { \"query\":\"id:B2 OR id:B8\" }}", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-3000")); assertJQ(req("qt","/get", "getVersions","13") ,"=={'versions':[-3000,1080,1050,1060,-940,1040,3,-2010,1030,1020,-1017,1015,1010]}" // the "3" appears because versions aren't checked while buffering ); logReplay.drainPermits(); rinfoFuture = ulog.applyBufferedUpdates(); assertTrue(rinfoFuture != null); 
assertEquals(UpdateLog.State.APPLYING_BUFFERED, ulog.getState()); // apply a single update logReplay.release(1); // now add another update updateJ(jsonAdd(sdoc("id","B7", "_version_","1070")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // a reordered update that should be dropped deleteAndGetVersion("B5", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-950")); deleteAndGetVersion("B6", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-2060")); logReplay.release(1000); UpdateLog.RecoveryInfo recInfo = rinfoFuture.get(); assertJQ(req("q", "*:*", "sort","id asc", "fl","id,_version_") , "/response/docs==[" + "{'id':'B3','_version_':1030}" + ",{'id':'B4','_version_':1040}" + ",{'id':'B5','_version_':1050}" + ",{'id':'B7','_version_':1070}" +"]" ); assertEquals(1, recInfo.deleteByQuery); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state } finally { DirectUpdateHandler2.commitOnClose = true; UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } } @Test @Ignore("HDFS-3107: no truncate support yet") public void testDropBuffered() throws Exception { DirectUpdateHandler2.commitOnClose = false; final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); UpdateLog.testing_logReplayHook = new Runnable() { @Override public void run() { try { assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); } catch (Exception e) { throw new RuntimeException(e); } } }; UpdateLog.testing_logReplayFinishHook = new Runnable() { @Override public void run() { logReplayFinish.release(); } }; SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); try { clearIndex(); assertU(commit()); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); Future<UpdateLog.RecoveryInfo> rinfoFuture = 
ulog.applyBufferedUpdates(); assertTrue(rinfoFuture == null); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); // simulate updates from a leader updateJ(jsonAdd(sdoc("id","C1", "_version_","101")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C2", "_version_","102")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C3", "_version_","103")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); ulog.bufferUpdates(); updateJ(jsonAdd(sdoc("id", "C4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id", "C5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); logReplay.release(1000); rinfoFuture = ulog.applyBufferedUpdates(); UpdateLog.RecoveryInfo rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); assertJQ(req("qt","/get", "getVersions","2") ,"=={'versions':[105,104]}" ); // this time add some docs first before buffering starts (so tlog won't be at pos 0) updateJ(jsonAdd(sdoc("id","C100", "_version_","200")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C101", "_version_","201")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); ulog.bufferUpdates(); updateJ(jsonAdd(sdoc("id","C103", "_version_","203")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C104", "_version_","204")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); ulog.bufferUpdates(); updateJ(jsonAdd(sdoc("id","C105", "_version_","205")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C106", "_version_","206")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); rinfoFuture = ulog.applyBufferedUpdates(); rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); assertJQ(req("q", "*:*", "sort","_version_ asc", "fl","id,_version_") , "/response/docs==[" + "{'id':'C4','_version_':104}" + 
",{'id':'C5','_version_':105}" + ",{'id':'C100','_version_':200}" + ",{'id':'C101','_version_':201}" + ",{'id':'C105','_version_':205}" + ",{'id':'C106','_version_':206}" +"]" ); assertJQ(req("qt","/get", "getVersions","6") ,"=={'versions':[206,205,201,200,105,104]}" ); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); updateJ(jsonAdd(sdoc("id","C301", "_version_","998")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C302", "_version_","999")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); // make sure we can overwrite with a lower version // TODO: is this functionality needed? updateJ(jsonAdd(sdoc("id","C301", "_version_","301")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","C302", "_version_","302")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertU(commit()); assertJQ(req("qt","/get", "getVersions","2") ,"=={'versions':[302,301]}" ); assertJQ(req("q", "*:*", "sort","_version_ desc", "fl","id,_version_", "rows","2") , "/response/docs==[" + "{'id':'C302','_version_':302}" + ",{'id':'C301','_version_':301}" +"]" ); updateJ(jsonAdd(sdoc("id","C2", "_version_","302")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state } finally { DirectUpdateHandler2.commitOnClose = true; UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } } @Test public void testBufferingFlags() throws Exception { DirectUpdateHandler2.commitOnClose = false; final Semaphore logReplayFinish = new Semaphore(0); UpdateLog.testing_logReplayFinishHook = new Runnable() { @Override public void run() { logReplayFinish.release(); } }; SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); try { clearIndex(); assertU(commit()); assertEquals(UpdateLog.State.ACTIVE, 
ulog.getState()); ulog.bufferUpdates(); // simulate updates from a leader updateJ(jsonAdd(sdoc("id","Q1", "_version_","101")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","Q2", "_version_","102")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","Q3", "_version_","103")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); logReplayFinish.acquire(); // wait for replay to finish assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) != 0); // since we died while buffering, we should see this last // // Try again to ensure that the previous log replay didn't wipe out our flags // req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) != 0); // now do some normal non-buffered adds updateJ(jsonAdd(sdoc("id","Q4", "_version_","114")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","Q5", "_version_","115")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); updateJ(jsonAdd(sdoc("id","Q6", "_version_","116")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); assertU(commit()); req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0); ulog.bufferUpdates(); // simulate receiving no updates ulog.applyBufferedUpdates(); updateJ(jsonAdd(sdoc("id","Q7", "_version_","117")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // do another add to make sure flags are back to normal req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0); // check 
flags on Q7 logReplayFinish.acquire();
// NOTE(review): the text above ("flags on Q7 ...") is the tail of a method that begins before
// this chunk; it appears truncated/garbled at the chunk boundary — confirm against the full file.
assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state
} finally {
  DirectUpdateHandler2.commitOnClose = true;
  UpdateLog.testing_logReplayHook = null;
  UpdateLog.testing_logReplayFinishHook = null;
  req().close();
}
}

// make sure that on a restart, versions don't start too low
@Test
public void testVersionsOnRestart() throws Exception {
  clearIndex();
  assertU(commit());

  assertU(adoc("id","D1", "val_i","1"));
  assertU(adoc("id","D2", "val_i","1"));
  assertU(commit());
  long v1 = getVer(req("q","id:D1"));
  long v1a = getVer(req("q","id:D2"));

  // restart the core and make sure newly assigned versions keep increasing
  h.close();
  createCore();

  assertU(adoc("id","D1", "val_i","2"));
  assertU(commit());
  long v2 = getVer(req("q","id:D1"));

  assert(v2 > v1);

  assertJQ(req("qt","/get", "getVersions","2")
      ,"/versions==[" + v2 + "," + v1a + "]"
  );
}

// make sure that log isn't needlessly replayed after a clean shutdown
@Test
public void testCleanShutdown() throws Exception {
  DirectUpdateHandler2.commitOnClose = true;
  final Semaphore logReplay = new Semaphore(0);
  final Semaphore logReplayFinish = new Semaphore(0);

  // hook blocks replay until the test releases permits; a clean shutdown should never trigger it
  UpdateLog.testing_logReplayHook = new Runnable() {
    @Override
    public void run() {
      try {
        assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  };

  UpdateLog.testing_logReplayFinishHook = new Runnable() {
    @Override
    public void run() {
      logReplayFinish.release();
    }
  };

  SolrQueryRequest req = req();
  UpdateHandler uhandler = req.getCore().getUpdateHandler();
  UpdateLog ulog = uhandler.getUpdateLog();

  try {
    clearIndex();
    assertU(commit());

    assertU(adoc("id","E1", "val_i","1"));
    assertU(adoc("id","E2", "val_i","1"));

    // set to a high enough number so this test won't hang on a bug
    logReplay.release(10);

    h.close();
    createCore();

    // make sure the docs got committed
    assertJQ(req("q","*:*"),"/response/numFound==2");

    // make sure no replay happened
    assertEquals(10, logReplay.availablePermits());

  } finally {
    DirectUpdateHandler2.commitOnClose = true;
    UpdateLog.testing_logReplayHook = null;
    UpdateLog.testing_logReplayFinishHook = null;
    req().close();
  }
}

// Adds nDocs documents and records the version returned for each, newest first, into 'versions'.
// NOTE(review): "start + nDocs" gives every doc in a batch the SAME id; upstream Solr's
// equivalent uses "start + i". Confirm whether distinct ids are intended here.
private void addDocs(int nDocs, int start, LinkedList<Long> versions) throws Exception {
  for (int i=0; i<nDocs; i++) {
    versions.addFirst( addAndGetVersion( sdoc("id",Integer.toString(start + nDocs)) , null) );
  }
}

// Verifies old tlog files are pruned once their updates are committed, that recovery still
// works across restarts, and that a trashed (corrupt) tlog does not prevent startup.
@Test
public void testRemoveOldLogs() throws Exception {
  try {
    DirectUpdateHandler2.commitOnClose = false;
    final Semaphore logReplay = new Semaphore(0);
    final Semaphore logReplayFinish = new Semaphore(0);

    UpdateLog.testing_logReplayHook = new Runnable() {
      @Override
      public void run() {
        try {
          assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };

    UpdateLog.testing_logReplayFinishHook = new Runnable() {
      @Override
      public void run() {
        logReplayFinish.release();
      }
    };

    clearIndex();
    assertU(commit());

    String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();

    h.close();

    // start from an empty tlog directory so log counts below are deterministic
    String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
    for (String file : files) {
      fs.delete(new Path(logDir, file), false);
    }

    assertEquals(0, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

    createCore();

    int start = 0;
    int maxReq = 50;

    LinkedList<Long> versions = new LinkedList<>();
    addDocs(10, start, versions); start+=10;
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));
    assertU(commit());
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    addDocs(10, start, versions); start+=10;
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));
    assertU(commit());
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    assertEquals(2, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

    addDocs(105, start, versions);
    start+=105;
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));
    assertU(commit());
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    // previous two logs should be gone now
    assertEquals(1, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

    addDocs(1, start, versions);
    start+=1;
    h.close();
    createCore();
    // trigger recovery, make sure that tlog reference handling is correct

    // test we can get versions while replay is happening
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    logReplay.release(1000);
    assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS));

    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    addDocs(105, start, versions);
    start+=105;
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));
    assertU(commit());
    assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,start)));

    // previous logs should be gone now
    assertEquals(1, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

    //
    // test that a corrupt tlog file doesn't stop us from coming up, or seeing versions before that tlog file.
    //
    addDocs(1, start, new LinkedList<Long>()); // don't add this to the versions list because we are going to lose it...
    h.close();
    files = HdfsUpdateLog.getLogList(fs, new Path(logDir));; // NOTE(review): stray empty statement (double semicolon)
    Arrays.sort(files);

    // overwrite the newest tlog with garbage so it cannot be opened
    FSDataOutputStream dos = fs.create(new Path(new Path(logDir), files[files.length-1]), (short)1);
    dos.writeUTF("This is a trashed log file that really shouldn't work at all, but we'll see..");
    dos.close();

    ignoreException("Failure to open existing");
    createCore();
    // we should still be able to get the list of versions (not including the trashed log file)
    assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, start)));
    resetExceptionIgnores();

  } finally {
    DirectUpdateHandler2.commitOnClose = true;
    UpdateLog.testing_logReplayHook = null;
    UpdateLog.testing_logReplayFinishHook = null;
  }
}

//
// test that a partially written last tlog entry (that will cause problems for both reverse reading and for
// log replay) doesn't stop us from coming up, and from recovering the documents that were not cut off.
//
@Test
public void testTruncatedLog() throws Exception {
  try {
    DirectUpdateHandler2.commitOnClose = false;
    final Semaphore logReplay = new Semaphore(0);
    final Semaphore logReplayFinish = new Semaphore(0);

    UpdateLog.testing_logReplayHook = new Runnable() {
      @Override
      public void run() {
        try {
          assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };

    UpdateLog.testing_logReplayFinishHook = new Runnable() {
      @Override
      public void run() {
        logReplayFinish.release();
      }
    };

    String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();

    clearIndex();
    assertU(commit());

    assertU(adoc("id","F1"));
    assertU(adoc("id","F2"));
    assertU(adoc("id","F3"));

    h.close();

    String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
    Arrays.sort(files);

    // append a half-written record to the end of the newest (good) tlog
    FSDataOutputStream dos = fs.append(new Path(logDir, files[files.length-1]));
    dos.writeLong(0xffffffffffffffffL);
    dos.writeChars("This should be appended to a good log file, representing a bad partially written record.");
    dos.close();

    logReplay.release(1000);
    logReplayFinish.drainPermits();
    ignoreException("OutOfBoundsException"); // this is what the corrupted log currently produces... subject to change.
    createCore();
    assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS));
    resetExceptionIgnores();
    assertJQ(req("q","*:*") ,"/response/numFound==3");

    //
    // Now test that the bad log file doesn't mess up retrieving latest versions
    //

    updateJ(jsonAdd(sdoc("id","F4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
    updateJ(jsonAdd(sdoc("id","F5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
    updateJ(jsonAdd(sdoc("id","F6", "_version_","106")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));

    // This currently skips the bad log file and also returns the version of the clearIndex (del *:*)
    // assertJQ(req("qt","/get", "getVersions","6"), "/versions==[106,105,104]");
    assertJQ(req("qt","/get", "getVersions","3"), "/versions==[106,105,104]");

  } finally {
    DirectUpdateHandler2.commitOnClose = true;
    UpdateLog.testing_logReplayHook = null;
    UpdateLog.testing_logReplayFinishHook = null;
  }
}

//
// test that a corrupt tlog doesn't stop us from coming up
//
@Test
public void testCorruptLog() throws Exception {
  try {
    DirectUpdateHandler2.commitOnClose = false;

    String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();

    clearIndex();
    assertU(commit());

    assertU(adoc("id","G1"));
    assertU(adoc("id","G2"));
    assertU(adoc("id","G3"));

    h.close();

    String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
    Arrays.sort(files);

    FSDataOutputStream dos = fs.create(new Path(logDir, files[files.length-1]), (short)1);
    dos.write(new byte[(int)800]); // zero out file
    dos.close();

    ignoreException("Failure to open existing log file"); // this is what the corrupted log currently produces... subject to change.
    createCore();
    resetExceptionIgnores();

    // just make sure it responds
    assertJQ(req("q","*:*") ,"/response/numFound==0");

    //
    // Now test that the bad log file doesn't mess up retrieving latest versions
    //

    updateJ(jsonAdd(sdoc("id","G4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
    updateJ(jsonAdd(sdoc("id","G5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
    updateJ(jsonAdd(sdoc("id","G6", "_version_","106")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));

    // This currently skips the bad log file and also returns the version of the clearIndex (del *:*)
    // assertJQ(req("qt","/get", "getVersions","6"), "/versions==[106,105,104]");
    assertJQ(req("qt","/get", "getVersions","3"), "/versions==[106,105,104]");
    assertU(commit());
    assertJQ(req("q","*:*") ,"/response/numFound==3");

    // This messes up some other tests (on windows) if we don't remove the bad log.
    // This *should* hopefully just be because the tests are too fragile and not because of real bugs - but it should be investigated further.
    deleteLogs();

  } finally {
    DirectUpdateHandler2.commitOnClose = true;
    UpdateLog.testing_logReplayHook = null;
    UpdateLog.testing_logReplayFinishHook = null;
  }
}

// in rare circumstances, two logs can be left uncapped (lacking a commit at the end signifying that all the content in the log was committed)
@Test
public void testRecoveryMultipleLogs() throws Exception {
  try {
    DirectUpdateHandler2.commitOnClose = false;
    final Semaphore logReplay = new Semaphore(0);
    final Semaphore logReplayFinish = new Semaphore(0);

    UpdateLog.testing_logReplayHook = new Runnable() {
      @Override
      public void run() {
        try {
          assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };

    UpdateLog.testing_logReplayFinishHook = new Runnable() {
      @Override
      public void run() {
        logReplayFinish.release();
      }
    };

    String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();

    clearIndex();
    assertU(commit());

    assertU(adoc("id","AAAAAA"));
    assertU(adoc("id","BBBBBB"));
    assertU(adoc("id","CCCCCC"));

    h.close();

    String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
    Arrays.sort(files);
    String fname = files[files.length-1];

    // truncate the last log with a partially-written record so it is left "uncapped"
    FSDataOutputStream dos = fs.append(new Path(logDir, files[files.length-1]));
    dos.writeLong(0xffffffffffffffffL);
    dos.writeChars("This should be appended to a good log file, representing a bad partially written record.");
    dos.close();

    FSDataInputStream dis = fs.open(new Path(logDir, files[files.length-1]));
    byte[] content = new byte[(int)dis.available()];
    dis.readFully(content);
    dis.close();

    // Now make a newer log file with just the IDs changed.  NOTE: this may not work if log format changes too much!
    findReplace("AAAAAA".getBytes(StandardCharsets.UTF_8), "aaaaaa".getBytes(StandardCharsets.UTF_8), content);
    findReplace("BBBBBB".getBytes(StandardCharsets.UTF_8), "bbbbbb".getBytes(StandardCharsets.UTF_8), content);
    findReplace("CCCCCC".getBytes(StandardCharsets.UTF_8), "cccccc".getBytes(StandardCharsets.UTF_8), content);

    // WARNING... assumes format of .00000n where n is less than 9
    long logNumber = Long.parseLong(fname.substring(fname.lastIndexOf(".") + 1));
    String fname2 = String.format(Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, logNumber + 1);

    dos = fs.create(new Path(logDir, fname2), (short)1);
    dos.write(content);
    dos.close();

    logReplay.release(1000);
    logReplayFinish.drainPermits();
    ignoreException("OutOfBoundsException"); // this is what the corrupted log currently produces... subject to change.
    createCore();
    assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS));
    resetExceptionIgnores();
    assertJQ(req("q","*:*") ,"/response/numFound==6");

  } finally {
    DirectUpdateHandler2.commitOnClose = true;
    UpdateLog.testing_logReplayHook = null;
    UpdateLog.testing_logReplayFinishHook = null;
  }
}

// In-place byte-level find/replace over 'data'.
// NOTE: replacement must currently be same size
private static void findReplace(byte[] from, byte[] to, byte[] data) {
  int idx = -from.length;
  for(;;) {
    idx = indexOf(from, data, idx + from.length);  // skip over previous match
    if (idx < 0) break;
    for (int i=0; i<to.length; i++) {
      data[idx+i] = to[i];
    }
  }
}

// Returns the first index >= start where 'target' occurs in 'data', or -1.
// NOTE(review): loop bound uses '<' rather than '<=', so a match ending exactly at the end of
// 'data' is missed; harmless for this test's inputs but confirm before reuse.
private static int indexOf(byte[] target, byte[] data, int start) {
  outer: for (int i=start; i<data.length - target.length; i++) {
    for (int j=0; j<target.length; j++) {
      if (data[i+j] != target[j]) continue outer;
    }
    return i;
  }
  return -1;
}

// stops the core, removes the transaction logs, restarts the core.
void deleteLogs() throws Exception {
  String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();

  h.close();

  try {
    String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
    for (String file : files) {
      //new File(logDir, file).delete();
      fs.delete(new Path(logDir, file), false);
    }
    assertEquals(0, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);
  } finally {
    // make sure we create the core again, even if the assert fails so it won't mess
    // up the next test.
    createCore();
    // NOTE(review): no expected value follows '==' below — confirm this asserts what's intended.
    assertJQ(req("q","*:*") ,"/response/numFound=="); // ensure it works
  }
}

// Extracts the _version_ of the first document found in a JSON query/RTG response;
// handles "doc", "docs", and "response/docs" shapes. Returns null when no doc is present.
private static Long getVer(SolrQueryRequest req) throws Exception {
  String response = JQ(req);
  Map rsp = (Map) ObjectBuilder.fromJSON(response);
  Map doc = null;
  if (rsp.containsKey("doc")) {
    doc = (Map)rsp.get("doc");
  } else if (rsp.containsKey("docs")) {
    List lst = (List)rsp.get("docs");
    if (lst.size() > 0) {
      doc = (Map)lst.get(0);
    }
  } else if (rsp.containsKey("response")) {
    Map responseMap = (Map)rsp.get("response");
    List lst = (List)responseMap.get("docs");
    if (lst.size() > 0) {
      doc = (Map)lst.get(0);
    }
  }
  if (doc == null) return null;
  return (Long)doc.get("_version_");
}

}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ecs.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * An object representing a container instance or task attachment. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/Attachment" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Attachment implements Serializable, Cloneable, StructuredPojo { /** * <p> * The unique identifier for the attachment. * </p> */ private String id; /** * <p> * The type of the attachment, such as <code>ElasticNetworkInterface</code>. * </p> */ private String type; /** * <p> * The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. * </p> */ private String status; /** * <p> * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * </p> */ private com.amazonaws.internal.SdkInternalList<KeyValuePair> details; /** * <p> * The unique identifier for the attachment. * </p> * * @param id * The unique identifier for the attachment. 
*/ public void setId(String id) { this.id = id; } /** * <p> * The unique identifier for the attachment. * </p> * * @return The unique identifier for the attachment. */ public String getId() { return this.id; } /** * <p> * The unique identifier for the attachment. * </p> * * @param id * The unique identifier for the attachment. * @return Returns a reference to this object so that method calls can be chained together. */ public Attachment withId(String id) { setId(id); return this; } /** * <p> * The type of the attachment, such as <code>ElasticNetworkInterface</code>. * </p> * * @param type * The type of the attachment, such as <code>ElasticNetworkInterface</code>. */ public void setType(String type) { this.type = type; } /** * <p> * The type of the attachment, such as <code>ElasticNetworkInterface</code>. * </p> * * @return The type of the attachment, such as <code>ElasticNetworkInterface</code>. */ public String getType() { return this.type; } /** * <p> * The type of the attachment, such as <code>ElasticNetworkInterface</code>. * </p> * * @param type * The type of the attachment, such as <code>ElasticNetworkInterface</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public Attachment withType(String type) { setType(type); return this; } /** * <p> * The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. * </p> * * @param status * The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the attachment. 
Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. * </p> * * @return The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. */ public String getStatus() { return this.status; } /** * <p> * The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. * </p> * * @param status * The status of the attachment. Valid values are <code>PRECREATED</code>, <code>CREATED</code>, * <code>ATTACHING</code>, <code>ATTACHED</code>, <code>DETACHING</code>, <code>DETACHED</code>, and * <code>DELETED</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public Attachment withStatus(String status) { setStatus(status); return this; } /** * <p> * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * </p> * * @return Details of the attachment. For elastic network interfaces, this includes the network interface ID, the * MAC address, the subnet ID, and the private IPv4 address. */ public java.util.List<KeyValuePair> getDetails() { if (details == null) { details = new com.amazonaws.internal.SdkInternalList<KeyValuePair>(); } return details; } /** * <p> * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * </p> * * @param details * Details of the attachment. 
For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. */ public void setDetails(java.util.Collection<KeyValuePair> details) { if (details == null) { this.details = null; return; } this.details = new com.amazonaws.internal.SdkInternalList<KeyValuePair>(details); } /** * <p> * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setDetails(java.util.Collection)} or {@link #withDetails(java.util.Collection)} if you want to override * the existing values. * </p> * * @param details * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * @return Returns a reference to this object so that method calls can be chained together. */ public Attachment withDetails(KeyValuePair... details) { if (this.details == null) { setDetails(new com.amazonaws.internal.SdkInternalList<KeyValuePair>(details.length)); } for (KeyValuePair ele : details) { this.details.add(ele); } return this; } /** * <p> * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * </p> * * @param details * Details of the attachment. For elastic network interfaces, this includes the network interface ID, the MAC * address, the subnet ID, and the private IPv4 address. * @return Returns a reference to this object so that method calls can be chained together. */ public Attachment withDetails(java.util.Collection<KeyValuePair> details) { setDetails(details); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. 
Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getId() != null) sb.append("Id: ").append(getId()).append(","); if (getType() != null) sb.append("Type: ").append(getType()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getDetails() != null) sb.append("Details: ").append(getDetails()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Attachment == false) return false; Attachment other = (Attachment) obj; if (other.getId() == null ^ this.getId() == null) return false; if (other.getId() != null && other.getId().equals(this.getId()) == false) return false; if (other.getType() == null ^ this.getType() == null) return false; if (other.getType() != null && other.getType().equals(this.getType()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getDetails() == null ^ this.getDetails() == null) return false; if (other.getDetails() != null && other.getDetails().equals(this.getDetails()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode()); hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getDetails() == null) ? 
0 : getDetails().hashCode()); return hashCode; } @Override public Attachment clone() { try { return (Attachment) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.ecs.model.transform.AttachmentMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * JasperReports - Free Java Reporting Library.
 * Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
 * http://www.jaspersoft.com
 *
 * Unless you have purchased a commercial license agreement from Jaspersoft,
 * the following license terms apply:
 *
 * This program is part of JasperReports.
 *
 * JasperReports is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * JasperReports is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
 */
package net.sf.jasperreports.ant;

import java.io.File;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperExportManager;
import net.sf.jasperreports.engine.JasperPrint;
import net.sf.jasperreports.engine.design.JRCompiler;
import net.sf.jasperreports.engine.util.JRLoader;

import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.util.RegexpPatternMapper;
import org.apache.tools.ant.util.SourceFileScanner;

/**
 * Ant task for batch-exporting generated report files.
 * Works like the built-in <code>javac</code> Ant task.
 * <p>
 * This task can take the following arguments:
 * <ul>
 * <li>src
 * <li>destdir
 * </ul>
 * Of these arguments, the <code>src</code> and <code>destdir</code> are required.
 * When this task executes, it will recursively scan the <code>src</code> and
 * <code>destdir</code> looking for generated report files to export.
 * This task makes its export decision based on timestamp and only JRPRINT files
 * that have no corresponding file in the target directory or where the destination report
 * design file is older than the source file will be exported.
 *
 * @author Teodor Danciu (teodord@users.sourceforge.net)
 * @version $Id: JRAntXmlExportTask.java 7199 2014-08-27 13:58:10Z teodord $
 */
public class JRAntXmlExportTask extends JRBaseAntTask
{
	// Source path holding the generated .jrprint files to export.
	private Path src;
	// Destination directory receiving the exported .jrpxml files.
	private File destdir;
	// Optional classpath used while loading/exporting the reports.
	private Path classpath;

	// Maps absolute source file path -> absolute destination file path for files needing export.
	private Map<String, String> reportFilesMap;

	/**
	 * Sets the source directories to find the XML report design files.
	 *
	 * @param srcdir source path
	 */
	public void setSrcdir(Path srcdir)
	{
		if (src == null)
		{
			src = srcdir;
		}
		else
		{
			src.append(srcdir);
		}
	}

	/**
	 * Adds a path for export source.
	 *
	 * @return source path
	 */
	public Path createSrc()
	{
		if (src == null)
		{
			src = new Path(getProject());
		}
		return src.createPath();
	}

	/**
	 * Sets the destination directory into which the report files should be exported.
	 *
	 * @param destdir destination directory
	 */
	public void setDestdir(File destdir)
	{
		this.destdir = destdir;
	}

	/**
	 * Adds a path to the classpath.
	 *
	 * @return classpath to use when updating the report
	 */
	public Path createClasspath()
	{
		if (classpath == null)
		{
			classpath = new Path(getProject());
		}
		return classpath.createPath();
	}

	/**
	 * Executes the task: validates attributes, scans for out-of-date files, then exports them.
	 * The thread context class loader is swapped in/out around the work when a classpath is set.
	 */
	public void execute() throws BuildException
	{
		checkParameters();

		reportFilesMap = new HashMap<String, String>();

		//JRProperties.setProperty(JRProperties.COMPILER_XML_VALIDATION, xmlvalidation);//FIXMECONTEXT is this needed? what about the one below?

		AntClassLoader classLoader = null;
		if (classpath != null)
		{
			jasperReportsContext.setProperty(JRCompiler.COMPILER_CLASSPATH, String.valueOf(classpath));

			ClassLoader parentClassLoader = getClass().getClassLoader();
			classLoader = new AntClassLoader(parentClassLoader, getProject(), classpath, true);
			// make the Ant classpath visible to code that resolves classes via the context loader
			classLoader.setThreadContextLoader();
		}

		try
		{
			/* */
			scanSrc();
			/* */
			export();
		}
		finally
		{
			if (classLoader != null)
			{
				// always restore the previous context class loader
				classLoader.resetThreadContextLoader();
			}
		}
	}

	/**
	 * Checks that all required attributes have been set and that the supplied values are valid.
	 */
	protected void checkParameters() throws BuildException
	{
		if (src == null || src.size() == 0)
		{
			throw new BuildException(
				"The srcdir attribute must be set.",
				getLocation()
				);
		}

		if (destdir != null && !destdir.isDirectory())
		{
			throw new BuildException(
				"The destination directory \"" + destdir + "\" does not exist "
				+ "or is not a directory.",
				getLocation()
				);
		}
	}

	/**
	 * Scans the source directories looking for source files to be exported.
	 * Directory resources are scanned recursively; single-file resources are handled directly.
	 */
	protected void scanSrc() throws BuildException
	{
		for(@SuppressWarnings("unchecked")
			Iterator<Resource> it = src.iterator(); it.hasNext();)
		{
			Resource resource = it.next();
			FileResource fileResource = resource instanceof FileResource ? (FileResource)resource : null;
			if (fileResource != null)
			{
				File file = fileResource.getFile();
				if (file.isDirectory())
				{
					DirectoryScanner ds = getDirectoryScanner(file);
					String[] files = ds.getIncludedFiles();
					scanDir(file, destdir != null ? destdir : file, files);
				}
				else
				{
					String[] files = new String[]{fileResource.getName()};
					scanDir(fileResource.getBaseDir(), destdir != null ? destdir : fileResource.getBaseDir(), files);
				}
			}
//			else
//			{
//				//FIXME what to do?
//			}
		}
	}

	/**
	 * Scans the directory looking for source files to be exported.
	 * The results are returned in the instance variable <code>reportFilesMap</code>.
	 * Only files newer than their mapped destination (source extension replaced with
	 * <code>.jrpxml</code>) survive the {@link SourceFileScanner} restriction.
	 *
	 * @param srcdir source directory
	 * @param destdir destination directory
	 * @param files included file names
	 */
	protected void scanDir(File srcdir, File destdir, String[] files)
	{
		// map "name.ext" -> "name.jrpxml" for up-to-date comparison
		RegexpPatternMapper mapper = new RegexpPatternMapper();
		mapper.setFrom("^(.*)\\.(.*)$");
		mapper.setTo("\\1.jrpxml");

		SourceFileScanner scanner = new SourceFileScanner(this);
		String[] newFiles = scanner.restrict(files, srcdir, destdir, mapper);

		if (newFiles != null && newFiles.length > 0)
		{
			for (int i = 0; i < newFiles.length; i++)
			{
				reportFilesMap.put(
					(new File(srcdir, newFiles[i])).getAbsolutePath(),
					(new File(destdir, mapper.mapFileName(newFiles[i])[0])).getAbsolutePath()
					);
			}
		}
	}

	/**
	 * Performs the export of the selected report files. Individual failures are reported and
	 * collected; a single BuildException is thrown at the end if any file failed.
	 */
	protected void export() throws BuildException
	{
		Collection<String> files = reportFilesMap.keySet();

		if (files != null && files.size() > 0)
		{
			boolean isError = false;

			System.out.println("Exporting " + files.size() + " report files.");

			String srcFileName = null;
			String destFileName = null;
			File destFileParent = null;

			for (Iterator<String> it = files.iterator(); it.hasNext();)
			{
				srcFileName = it.next();
				destFileName = reportFilesMap.get(srcFileName);
				destFileParent = new File(destFileName).getParentFile();
				if(!destFileParent.exists())
				{
					destFileParent.mkdirs();
				}

				try
				{
					System.out.print("File : " + srcFileName + " ... ");

					JasperPrint jasperPrint = (JasperPrint)JRLoader.loadObjectFromFile(srcFileName);
					JasperExportManager.getInstance(jasperReportsContext).exportToXmlFile(jasperPrint, destFileName, false);

					System.out.println("OK.");
				}
				catch(JRException e)
				{
					System.out.println("FAILED.");
					// NOTE(review): message says "updating report design" though this task exports;
					// likely copied from the update task — confirm before changing the runtime string.
					System.out.println("Error updating report design : " + srcFileName);
					e.printStackTrace(System.out);
					isError = true;
				}
			}

			if(isError)
			{
				throw new BuildException("Errors were encountered when updating report designs.");
			}
		}
	}
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.sunshine.app.data;

import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;

/**
 * ContentProvider exposing the Sunshine weather and location tables.
 * (Class continues beyond this chunk; insert/update/delete/getType are defined below.)
 */
public class WeatherProvider extends ContentProvider {

    // URI match codes returned by sUriMatcher for each supported URI shape.
    static final int WEATHER = 100;
    static final int WEATHER_WITH_LOCATION = 101;
    static final int WEATHER_WITH_LOCATION_AND_DATE = 102;
    static final int LOCATION = 300;

    // The URI Matcher used by this content provider.
    private static final UriMatcher sUriMatcher = buildUriMatcher();

    // Builds queries over weather joined with location; configured in the static block below.
    private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder;

    //location.location_setting = ?
    private static final String sLocationSettingSelection =
            WeatherContract.LocationEntry.TABLE_NAME +
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? ";

    //location.location_setting = ? AND date >= ?
    private static final String sLocationSettingWithStartDateSelection =
            WeatherContract.LocationEntry.TABLE_NAME +
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? ";

    //location.location_setting = ? AND date = ?
    private static final String sLocationSettingAndDaySelection =
            WeatherContract.LocationEntry.TABLE_NAME +
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " = ? ";

    static {
        sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder();

        //This is an inner join which looks like
        //weather INNER JOIN location ON weather.location_id = location._id
        sWeatherByLocationSettingQueryBuilder.setTables(
                WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " +
                        WeatherContract.LocationEntry.TABLE_NAME +
                        " ON " + WeatherContract.WeatherEntry.TABLE_NAME +
                        "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY +
                        " = " + WeatherContract.LocationEntry.TABLE_NAME +
                        "." + WeatherContract.LocationEntry._ID);
    }

    // SQLite open helper; created in onCreate().
    private WeatherDbHelper mOpenHelper;

    /*
        Students: Here is where you need to create the UriMatcher. This UriMatcher will
        match each URI to the WEATHER, WEATHER_WITH_LOCATION, WEATHER_WITH_LOCATION_AND_DATE,
        and LOCATION integer constants defined above.  You can test this by uncommenting the
        testUriMatcher test within TestUriMatcher.
     */
    static UriMatcher buildUriMatcher() {
        // I know what you're thinking.  Why create a UriMatcher when you can use regular
        // expressions instead?  Because you're not crazy, that's why.

        // All paths added to the UriMatcher have a corresponding code to return when a match is
        // found.  The code passed into the constructor represents the code to return for the root
        // URI.  It's common to use NO_MATCH as the code for this case.
        final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
        final String authority = WeatherContract.CONTENT_AUTHORITY;

        // For each type of URI you want to add, create a corresponding code.
        matcher.addURI(authority, WeatherContract.PATH_WEATHER, WEATHER);
        matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*", WEATHER_WITH_LOCATION);
        matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*/#", WEATHER_WITH_LOCATION_AND_DATE);

        matcher.addURI(authority, WeatherContract.PATH_LOCATION, LOCATION);
        return matcher;
    }

    // Queries weather rows for a location setting, optionally restricted to dates >= startDate.
    // A startDate of 0 (absent in the URI) means "no date restriction".
    private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
        long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri);

        String[] selectionArgs;
        String selection;

        if (startDate == 0) {
            selection = sLocationSettingSelection;
            selectionArgs = new String[]{locationSetting};
        } else {
            selectionArgs = new String[]{locationSetting, Long.toString(startDate)};
            selection = sLocationSettingWithStartDateSelection;
        }

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                selection,
                selectionArgs,
                null,
                null,
                sortOrder
        );
    }

    // Queries the single weather row matching a location setting and an exact date.
    private Cursor getWeatherByLocationSettingAndDate(
            Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
        long date = WeatherContract.WeatherEntry.getDateFromUri(uri);

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                sLocationSettingAndDaySelection,
                new String[]{locationSetting, Long.toString(date)},
                null,
                null,
                sortOrder
        );
    }

    /*
        Students: We've coded this for you.  We just create a new WeatherDbHelper for later use
        here.
     */
    @Override
    public boolean onCreate() {
        mOpenHelper = new WeatherDbHelper(getContext());
        return true;
    }

    /*
        Students: Here's where you'll code the getType function that uses the UriMatcher.  You can
        test this by uncommenting testGetType in TestProvider.
     */
    @Override
    public String getType(Uri uri) {

        // Use the Uri Matcher to determine what kind of URI this is.
final int match = sUriMatcher.match(uri); switch (match) { // Student: Uncomment and fill out these two cases case WEATHER_WITH_LOCATION_AND_DATE: return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE; case WEATHER_WITH_LOCATION: return WeatherContract.WeatherEntry.CONTENT_TYPE; case WEATHER: return WeatherContract.WeatherEntry.CONTENT_TYPE; case LOCATION: return WeatherContract.LocationEntry.CONTENT_TYPE; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { // Here's the switch statement that, given a URI, will determine what kind of request it is, // and query the database accordingly. Cursor retCursor; switch (sUriMatcher.match(uri)) { // "weather/*/*" case WEATHER_WITH_LOCATION_AND_DATE: { retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder); break; } // "weather/*" case WEATHER_WITH_LOCATION: { retCursor = getWeatherByLocationSetting(uri, projection, sortOrder); break; } // "weather" case WEATHER: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } // "location" case LOCATION: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.LocationEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } retCursor.setNotificationUri(getContext().getContentResolver(), uri); return retCursor; } /* Student: Add the ability to insert Locations to the implementation of this function. 
*/ @Override public Uri insert(Uri uri, ContentValues values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); Uri returnUri; switch (match) { case WEATHER: { normalizeDate(values); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values); if (_id > 0) returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } case LOCATION: { long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values); if (_id > 0) returnUri = WeatherContract.LocationEntry.buildLocationUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } getContext().getContentResolver().notifyChange(uri, null); return returnUri; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowsDeleted; // this makes delete all rows return the number of rows deleted if (null == selection) selection = "1"; switch (match) { case WEATHER: rowsDeleted = db.delete( WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs); break; case LOCATION: rowsDeleted = db.delete( WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } // Because a null deletes all rows if (rowsDeleted != 0) { getContext().getContentResolver().notifyChange(uri, null); } return rowsDeleted; } private void normalizeDate(ContentValues values) { // normalize the date value if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) { long dateValue = values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE); values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue)); } } 
@Override public int update( Uri uri, ContentValues values, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowsUpdated; switch (match) { case WEATHER: normalizeDate(values); rowsUpdated = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection, selectionArgs); break; case LOCATION: rowsUpdated = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } if (rowsUpdated != 0) { getContext().getContentResolver().notifyChange(uri, null); } return rowsUpdated; } @Override public int bulkInsert(Uri uri, ContentValues[] values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); switch (match) { case WEATHER: db.beginTransaction(); int returnCount = 0; try { for (ContentValues value : values) { normalizeDate(value); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value); if (_id != -1) { returnCount++; } } db.setTransactionSuccessful(); } finally { db.endTransaction(); } getContext().getContentResolver().notifyChange(uri, null); return returnCount; default: return super.bulkInsert(uri, values); } } // You do not need to call this method. This is a method specifically to assist the testing // framework in running smoothly. You can read more at: // http://developer.android.com/reference/android/content/ContentProvider.html#shutdown() @Override @TargetApi(11) public void shutdown() { mOpenHelper.close(); super.shutdown(); } }
/* * Copyright (c) 2009 - 2017 - Pierre-Laurent Coirer, Frank Hossfeld * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.mvp4g.rebind.test_tools.annotation; import com.mvp4g.client.DefaultMvp4gGinModule; import com.mvp4g.client.annotation.Debug; import com.mvp4g.client.annotation.Debug.LogLevel; import com.mvp4g.client.annotation.Event; import com.mvp4g.client.annotation.Filters; import com.mvp4g.client.annotation.Forward; import com.mvp4g.client.annotation.InitHistory; import com.mvp4g.client.annotation.NotFoundHistory; import com.mvp4g.client.annotation.PlaceService; import com.mvp4g.client.annotation.Start; import com.mvp4g.client.annotation.module.AfterLoadChildModule; import com.mvp4g.client.annotation.module.BeforeLoadChildModule; import com.mvp4g.client.annotation.module.ChildModule; import com.mvp4g.client.annotation.module.ChildModules; import com.mvp4g.client.annotation.module.DisplayChildModuleView; import com.mvp4g.client.annotation.module.LoadChildModuleError; import com.mvp4g.client.event.EventBus; import com.mvp4g.client.event.EventBusWithLookup; import com.mvp4g.client.event.Mvp4gLogger; import com.mvp4g.client.presenter.NoStartPresenter; import com.mvp4g.rebind.test_tools.CustomPlaceService; import com.mvp4g.rebind.test_tools.Modules; import com.mvp4g.rebind.test_tools.annotation.gin.OneGinModule; import com.mvp4g.rebind.test_tools.annotation.presenters.PresenterWithName; import com.mvp4g.rebind.test_tools.annotation.presenters.SimplePresenter01; 
/**
 * Holder for {@code @Events}-annotated event-bus interface declarations.
 * Package name suggests these are fixtures exercised by the mvp4g generator
 * tests — each nested type models one valid or deliberately-invalid
 * configuration (duplicate annotations, wrong presenter class, useless
 * annotations, etc.). NOTE(review): do not "fix" the seemingly-wrong
 * declarations here; the misconfigurations appear intentional.
 */
public class Events {

    // Annotated with @Events but does not extend EventBus.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface NotEventBus {
    }

    // Minimal valid event bus.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface SimpleEventBus extends EventBus {
    }

    // Event bus variant that supports event lookup by name.
    @com.mvg4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface EventBusWithLookUp extends EventBusWithLookup {
    }

    // Start presenter referenced by its registered name.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class, startPresenterName = "name")
    public static interface EventBusWithStartName extends EventBus {
    }

    // startPresenterName does not belong to the declared startPresenter class.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class, startPresenterName = "name")
    public static interface EventBusWithStartNameAndWrongClass extends EventBus {
    }

    // Method without an @Event annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface EventBusWithMethodAndNoAnnotation extends EventBus {

        public void event(String obj);

    }

    // Two overloads of the same event method name.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface EventBusWithSameMethod extends EventBus {

        @Event
        public void event(String obj);

        @Event
        public void event();

    }

    // Duplicate @Start annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleStart extends EventBus {

        @Start
        @Event(handlerNames = "name", calledMethod = "treatEvent1")
        public void event1(String obj);

        @Start
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Duplicate @Forward annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleForward extends EventBus {

        @Forward
        @Event(handlerNames = "name", calledMethod = "treatEvent1")
        public void event1(String obj);

        @Forward
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Duplicate @InitHistory annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleInitHistory extends EventBus {

        @InitHistory
        @Event(handlerNames = "name", calledMethod = "treatEvent1")
        public void event1(String obj);

        @InitHistory
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Duplicate @NotFoundHistory annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleNotFoundHistory extends EventBus {

        @NotFoundHistory
        @Event(handlerNames = "name", calledMethod = "treatEvent1")
        public void event1(String obj);

        @NotFoundHistory
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Empty @ChildModules annotation.
    @ChildModules({})
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface EventBusUselessChildModules extends EventBus {
    }

    // Event bus explicitly bound to another module.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class, module = Modules.Module01.class)
    public static interface EventBusForOtherModule extends EventBus {
    }

    // The same child module declared twice.
    @ChildModules({ @ChildModule(moduleClass = Modules.Module01.class), @ChildModule(moduleClass = Modules.Module01.class) })
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static interface EventBusWithSameChild extends EventBus {
    }

    // Valid configuration with two child modules and various forwarding setups.
    @ChildModules({ @ChildModule(moduleClass = Modules.Module01.class), @ChildModule(moduleClass = Modules.ModuleWithParent01.class, async = false, autoDisplay = false) })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithChildren extends EventBus {

        @DisplayChildModuleView(Modules.Module01.class)
        @Event()
        public void event1(String obj);

        @Event(handlers = PresenterWithName.class, forwardToModules = Modules.Module01.class)
        public void event2();

        @Event(handlers = PresenterWithName.class, forwardToModules = { Modules.ModuleWithParent01.class, Modules.Module01.class })
        public void event3();

        @Event(handlers = PresenterWithName.class, forwardToParent = true)
        public void event4();

    }

    // Forwarding to sibling modules without declaring them as children.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithSiblings extends EventBus {

        @Event(handlers = PresenterWithName.class, forwardToModules = Modules.Module01.class)
        public void event1();

        @Event(handlers = PresenterWithName.class, forwardToModules = { Modules.ModuleWithParent01.class, Modules.Module01.class })
        public void event2();

    }

    // Duplicate @BeforeLoadChildModule annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleBefore extends EventBus {

        @BeforeLoadChildModule
        @Event
        public void event1();

        @BeforeLoadChildModule
        @Event
        public void event2();

    }

    // Duplicate @AfterLoadChildModule annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleAfter extends EventBus {

        @AfterLoadChildModule
        @Event
        public void event1();

        @AfterLoadChildModule
        @Event
        public void event2();

    }

    // Duplicate @LoadChildModuleError annotation.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusDoubleError extends EventBus {

        @LoadChildModuleError
        @Event
        public void event1();

        @LoadChildModuleError
        @Event
        public void event2();

    }

    // Valid before/after/error load-child configuration.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusLoadChildConfig extends EventBus {

        @BeforeLoadChildModule
        @Event
        public void event1();

        @AfterLoadChildModule
        @Event
        public void event2();

        @LoadChildModuleError
        @Event
        public void event3();

    }

    // Forwarding to a module that is not a declared child.
    @ChildModules({ @ChildModule(moduleClass = Modules.Module01.class) })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusUnknownModuleForEvent extends EventBus {

        @Event(handlers = PresenterWithName.class, forwardToModules = Modules.ModuleWithParent01.class)
        public void event2();

    }

    // Displaying the view of a module that is not a declared child.
    @ChildModules({ @ChildModule(moduleClass = Modules.Module01.class) })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusUnknownModuleForLoadModuleViewEvent extends EventBus {

        @DisplayChildModuleView(Modules.ModuleWithParent01.class)
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Two events displaying the same child module view.
    @ChildModules({ @ChildModule(moduleClass = Modules.Module01.class) })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusSameModuleForLoadModuleViewEvent extends EventBus {

        @DisplayChildModuleView(Modules.Module01.class)
        @Event(handlers = PresenterWithName.class)
        public void event1();

        @DisplayChildModuleView(Modules.Module01.class)
        @Event(handlers = PresenterWithName.class)
        public void event2();

    }

    // Empty @Filters annotation.
    @Filters(filterClasses = {})
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusUselessFilter extends EventBus {
    }

    // forceFilters with no filter classes.
    @Filters(filterClasses = {}, forceFilters = true)
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusNoFilterWithForce extends EventBus {
    }

    // Two distinct event filters.
    @Filters(filterClasses = { EventFilters.EventFilter1.class, EventFilters.EventFilter2.class })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithFilters extends EventBus {
    }

    // Filters with every optional parameter set.
    @Filters(filterClasses = { EventFilters.EventFilter1.class, EventFilters.EventFilter2.class }, afterHistory = true, filterForward = false, filterStart = false, forceFilters = true)
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithFiltersWithParam extends EventBus {
    }

    // The same filter declared twice.
    @Filters(filterClasses = { EventFilters.EventFilter1.class, EventFilters.EventFilter1.class })
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithSameFilter extends EventBus {
    }

    // One GIN module with one module property.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class, ginModules = OneGinModule.class, ginModuleProperties = "property1")
    public static interface EventBusWithGin extends EventBus {
    }

    // Several GIN modules with matching module properties.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class, ginModules = { OneGinModule.class, DefaultMvp4gGinModule.class }, ginModuleProperties = { "property1", "property2" })
    public static interface EventBusWithGins extends EventBus {
    }

    // Default @Debug logger.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    @Debug
    public static interface EventBusWithDefaultLogger extends EventBus {
    }

    // Custom @Debug logger with detailed log level.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    @Debug(logger = TestLogger.class, logLevel = LogLevel.DETAILED)
    public static interface EventBusWithCustomLogger extends EventBus {
    }

    // Event with an explicit history name.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    public static interface EventBusWithHistoryName extends EventBus {

        @Event
        public void event1(String obj);

        @Event(name = "historyName")
        public void event2();

    }

    // Custom place service configuration.
    @com.mvp4g.client.annotation.Events(startPresenter = PresenterWithName.class)
    @PlaceService(CustomPlaceService.class)
    public static interface EventBusWithHistoryConfig extends EventBus {
    }

    // NoStartPresenter placeholder instead of a real start presenter.
    @com.mvp4g.client.annotation.Events(startPresenter = NoStartPresenter.class)
    public static interface EventBusWithNoStartPresenter extends EventBus {
    }

    // Annotated with @Events but declared as a class, not an interface.
    @com.mvp4g.client.annotation.Events(startPresenter = SimplePresenter01.class)
    public static class NotInterfaceEventBus {
    }

    // Minimal no-op logger used by the custom-logger fixture above.
    public class TestLogger implements Mvp4gLogger {

        public void log(String message, int depth) {
            // TODO Auto-generated method stub
        }

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Random; import java.util.Set; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.junit.Test; import org.apache.cassandra.cache.RowCacheKey; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.cql3.CQLTester; import org.apache.cassandra.cql3.UntypedResultSet; import org.apache.cassandra.db.rows.Row; import org.apache.cassandra.dht.BootStrapper; import org.apache.cassandra.dht.Murmur3Partitioner; import org.apache.cassandra.dht.Token; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.locator.TokenMetadata; import org.apache.cassandra.service.CacheService; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.utils.FBUtilities; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class ImportTest extends CQLTester { @Test public void basicImportTest() throws Throwable { createTable("create table %s (id int primary key, d int)"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); File backupdir = moveToBackupDir(sstables); assertEquals(0, execute("select * from %s").size()); ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).build(); getCurrentColumnFamilyStore().importNewSSTables(options); assertEquals(10, execute("select * from %s").size()); } @Test @Deprecated public void refreshTest() throws Throwable { createTable("create table %s (id int primary key, d int)"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); getCurrentColumnFamilyStore().clearUnsafe(); assertEquals(0, execute("select * from %s").size()); getCurrentColumnFamilyStore().loadNewSSTables(); assertEquals(10, execute("select * from %s").size()); } @Test public void importResetLevelTest() throws Throwable { createTable("create table %s (id int primary key, d int)"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); for (SSTableReader sstable : sstables) sstable.descriptor.getMetadataSerializer().mutateLevel(sstable.descriptor, 8); File backupdir = moveToBackupDir(sstables); assertEquals(0, execute("select * from %s").size()); ColumnFamilyStore.ImportOptions options = 
ColumnFamilyStore.ImportOptions.options(backupdir.toString()).build(); getCurrentColumnFamilyStore().importNewSSTables(options); assertEquals(10, execute("select * from %s").size()); sstables = getCurrentColumnFamilyStore().getLiveSSTables(); assertEquals(1, sstables.size()); for (SSTableReader sstable : sstables) assertEquals(8, sstable.getSSTableLevel()); getCurrentColumnFamilyStore().clearUnsafe(); backupdir = moveToBackupDir(sstables); options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).resetLevel(true).build(); getCurrentColumnFamilyStore().importNewSSTables(options); sstables = getCurrentColumnFamilyStore().getLiveSSTables(); assertEquals(1, sstables.size()); for (SSTableReader sstable : getCurrentColumnFamilyStore().getLiveSSTables()) assertEquals(0, sstable.getSSTableLevel()); } @Test public void importClearRepairedTest() throws Throwable { createTable("create table %s (id int primary key, d int)"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); for (SSTableReader sstable : sstables) sstable.descriptor.getMetadataSerializer().mutateRepaired(sstable.descriptor, 111, null); File backupdir = moveToBackupDir(sstables); assertEquals(0, execute("select * from %s").size()); ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).build(); getCurrentColumnFamilyStore().importNewSSTables(options); assertEquals(10, execute("select * from %s").size()); sstables = getCurrentColumnFamilyStore().getLiveSSTables(); assertEquals(1, sstables.size()); for (SSTableReader sstable : sstables) assertTrue(sstable.isRepaired()); getCurrentColumnFamilyStore().clearUnsafe(); backupdir = moveToBackupDir(sstables); options = 
ColumnFamilyStore.ImportOptions.options(backupdir.toString()).clearRepaired(true).build(); getCurrentColumnFamilyStore().importNewSSTables(options); sstables = getCurrentColumnFamilyStore().getLiveSSTables(); assertEquals(1, sstables.size()); for (SSTableReader sstable : getCurrentColumnFamilyStore().getLiveSSTables()) assertFalse(sstable.isRepaired()); } private File moveToBackupDir(Set<SSTableReader> sstables) throws IOException { Path temp = Files.createTempDirectory("importtest"); SSTableReader sst = sstables.iterator().next(); System.out.println("DIR: "+sst.descriptor.directory); String tabledir = sst.descriptor.directory.getName(); String ksdir = sst.descriptor.directory.getParentFile().getName(); Path backupdir = Files.createDirectories(Paths.get(temp.toString(), ksdir, tabledir)); for (SSTableReader sstable : sstables) { for (File f : sstable.descriptor.directory.listFiles()) { if (f.toString().contains(sstable.descriptor.baseFilename())) { System.out.println("move " + f.toPath() + " to " + backupdir); Files.move(f.toPath(), new File(backupdir.toFile(), f.getName()).toPath()); } } } return backupdir.toFile(); } @Test public void testBestDisk() throws Throwable { createTable("create table %s (id int primary key, d int)"); TokenMetadata tmd = StorageService.instance.getTokenMetadata(); tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 1), InetAddressAndPort.getByName("127.0.0.1")); Directories dirs = new Directories(getCurrentColumnFamilyStore().metadata(), Lists.newArrayList(new Directories.DataDirectory(new File("/tmp/1")), new Directories.DataDirectory(new File("/tmp/2")), new Directories.DataDirectory(new File("/tmp/3")))); MockCFS mock = new MockCFS(getCurrentColumnFamilyStore(), dirs); int rows = 1000; Random rand = new Random(); for (int i = 0; i < rows; i++) execute("insert into %s (id, d) values (?, ?)", rand.nextInt(), i); UntypedResultSet res = execute("SELECT token(id) as t FROM %s"); long disk1 = 0, disk2 = 0, disk3 = 0; DiskBoundaries 
// NOTE(review): this chunk starts mid-method — the statements below are the tail of a
// disk-boundary test whose beginning is above this view. They count which mocked data
// directory each partition's token maps to, then assert that findBestDiskAndInvalidateCaches
// picks the directory holding the most keys.
boundaries = mock.getDiskBoundaries();
// Tally keys per disk: positions.get(i) are the upper token bounds of disks 1 and 2;
// anything beyond both falls on disk 3.
for (UntypedResultSet.Row r : res) {
    Token t = new Murmur3Partitioner.LongToken(r.getLong("t"));
    if (boundaries.positions.get(0).compareTo(t.minKeyBound()) > 0)
        disk1++;
    else if (boundaries.positions.get(1).compareTo(t.minKeyBound()) > 0)
        disk2++;
    else
        disk3++;
}
// The expected target is whichever disk holds the plurality of keys (ties go to the lower-numbered disk).
File expected;
if (disk1 >= disk2 && disk1 >= disk3)
    expected = new File("/tmp/1");
else if (disk2 >= disk1 && disk2 >= disk3)
    expected = new File("/tmp/2");
else
    expected = new File("/tmp/3");
getCurrentColumnFamilyStore().forceBlockingFlush();
SSTableReader sstable = getCurrentColumnFamilyStore().getLiveSSTables().iterator().next();
File bestDisk = ColumnFamilyStore.findBestDiskAndInvalidateCaches(mock, sstable.descriptor, "/tmp/", false, true);
assertTrue(expected + " : " + bestDisk, bestDisk.toString().startsWith(expected.toString()));
}

/**
 * Imports an sstable without key counting: the import should land the sstable in one of the
 * mock data directories under /tmp/ (scoped to this table's id) rather than being routed by
 * token distribution.
 */
@Test
public void testNoCounting() throws Throwable {
    createTable("create table %s (id int primary key, d int)");
    Directories dirs = new Directories(getCurrentColumnFamilyStore().metadata(),
                                       Lists.newArrayList(new Directories.DataDirectory(new File("/tmp/1")),
                                                          new Directories.DataDirectory(new File("/tmp/2")),
                                                          new Directories.DataDirectory(new File("/tmp/3"))));
    for (int i = 0; i < 10; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    Set<SSTableReader> toMove = getCurrentColumnFamilyStore().getLiveSSTables();
    getCurrentColumnFamilyStore().clearUnsafe();
    // Stage the flushed sstable in a backup dir, then import it into a CFS backed by the mock dirs.
    File dir = moveToBackupDir(toMove);
    MockCFS mock = new MockCFS(getCurrentColumnFamilyStore(), dirs);
    mock.importNewSSTables(ColumnFamilyStore.ImportOptions.options(dir.toString()).build());
    assertEquals(1, mock.getLiveSSTables().size());
    for (SSTableReader sstable : mock.getLiveSSTables()) {
        // Imported sstable must live under /tmp/ in a directory containing the table id.
        String expected = new File("/tmp/").getCanonicalPath();
        assertTrue("dir = " + sstable.descriptor.directory + " : " + expected, sstable.descriptor.directory.toString().startsWith(expected));
        assertTrue(sstable.descriptor.directory.toString().contains(getCurrentColumnFamilyStore().metadata.id.toHexString()));
    }
}

/**
 * Corrupts one sstable's digest file and verifies that an import with verification enabled
 * fails AND leaves no partially-imported sstable files behind in the data directory.
 */
@Test
public void testImportCorrupt() throws Throwable {
    createTable("create table %s (id int primary key, d int)");
    for (int i = 0; i < 10; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    SSTableReader sstableToCorrupt = getCurrentColumnFamilyStore().getLiveSSTables().iterator().next();
    for (int i = 0; i < 10; i++)
        execute("insert into %s (id, d) values (?, ?)", i + 10, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
    getCurrentColumnFamilyStore().clearUnsafe();
    // Corrupt the digest by bumping the recorded checksum by one.
    try (RandomAccessFile file = new RandomAccessFile(sstableToCorrupt.descriptor.filenameFor(Component.DIGEST), "rw")) {
        Long correctChecksum = Long.valueOf(file.readLine());
        VerifyTest.writeChecksum(++correctChecksum, sstableToCorrupt.descriptor.filenameFor(Component.DIGEST));
    }
    File backupdir = moveToBackupDir(sstables);
    try {
        ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).verifySSTables(true).build();
        getCurrentColumnFamilyStore().importNewSSTables(options);
        fail("importNewSSTables should fail!");
    } catch (Throwable t) {
        // On failure the data directory must be clean — a failed import may not leak files.
        for (File f : getCurrentColumnFamilyStore().getDirectories().getDirectoryForNewSSTables().listFiles()) {
            if (f.isFile())
                fail("there should not be any sstables in the data directory after a failed import: " + f);
        }
    }
}

/**
 * Token-verifying import must reject sstables whose keys fall outside this node's owned
 * ranges (ownership is faked by assigning tokens to three other endpoints).
 */
@Test(expected = RuntimeException.class)
public void testImportOutOfRange() throws Throwable {
    createTable("create table %s (id int primary key, d int)");
    for (int i = 0; i < 1000; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
    getCurrentColumnFamilyStore().clearUnsafe();
    TokenMetadata tmd = StorageService.instance.getTokenMetadata();
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.1"));
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.2"));
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.3"));
    File backupdir = moveToBackupDir(sstables);
    try {
        ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).build();
        getCurrentColumnFamilyStore().importNewSSTables(options);
    } finally {
        // Always restore token metadata so later tests see a clean ring.
        tmd.clearUnsafe();
    }
}

/**
 * Same out-of-range scenario as above, but with extended verification enabled — must still
 * be rejected with a RuntimeException.
 */
@Test(expected = RuntimeException.class)
public void testImportOutOfRangeExtendedVerify() throws Throwable {
    createTable("create table %s (id int primary key, d int)");
    for (int i = 0; i < 1000; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
    getCurrentColumnFamilyStore().clearUnsafe();
    TokenMetadata tmd = StorageService.instance.getTokenMetadata();
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.1"));
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.2"));
    tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.3"));
    File backupdir = moveToBackupDir(sstables);
    try {
        ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString())
                                                                                 .verifySSTables(true)
                                                                                 .verifyTokens(true)
                                                                                 .extendedVerify(true).build();
        getCurrentColumnFamilyStore().importNewSSTables(options);
    } finally {
        tmd.clearUnsafe();
    }
}

/**
 * Verifies row-cache interaction on import: with invalidateCaches=false an import leaves the
 * cache untouched; with invalidateCaches=true only the keys belonging to the re-imported
 * sstable are evicted while entries for the other sstable survive.
 */
@Test
public void testImportInvalidateCache() throws Throwable {
    createTable("create table %s (id int primary key, d int) WITH caching = { 'keys': 'NONE', 'rows_per_partition': 'ALL' }");
    for (int i = 0; i < 10; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    CacheService.instance.setRowCacheCapacityInMB(1);
    Set<RowCacheKey> keysToInvalidate = new HashSet<>();
    // populate the row cache with keys from the sstable we are about to remove
    for (int i = 0; i < 10; i++) {
        execute("SELECT * FROM %s WHERE id = ?", i);
    }
    Iterator<RowCacheKey> it = CacheService.instance.rowCache.keyIterator();
    while (it.hasNext()) {
        keysToInvalidate.add(it.next());
    }
    SSTableReader sstableToImport = getCurrentColumnFamilyStore().getLiveSSTables().iterator().next();
    getCurrentColumnFamilyStore().clearUnsafe();
    for (int i = 10; i < 20; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    Set<RowCacheKey> allCachedKeys = new HashSet<>();
    // populate row cache with sstable we are keeping
    for (int i = 10; i < 20; i++) {
        execute("SELECT * FROM %s WHERE id = ?", i);
    }
    it = CacheService.instance.rowCache.keyIterator();
    while (it.hasNext()) {
        allCachedKeys.add(it.next());
    }
    assertEquals(20, CacheService.instance.rowCache.size());
    File backupdir = moveToBackupDir(Collections.singleton(sstableToImport));
    // make sure we don't wipe caches with invalidateCaches = false:
    Set<SSTableReader> beforeFirstImport = getCurrentColumnFamilyStore().getLiveSSTables();
    ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).build();
    getCurrentColumnFamilyStore().importNewSSTables(options);
    assertEquals(20, CacheService.instance.rowCache.size());
    Set<SSTableReader> toMove = Sets.difference(getCurrentColumnFamilyStore().getLiveSSTables(), beforeFirstImport);
    getCurrentColumnFamilyStore().clearUnsafe();
    // move away the sstable we just imported again:
    backupdir = moveToBackupDir(toMove);
    options = ColumnFamilyStore.ImportOptions.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).invalidateCaches(true).build();
    getCurrentColumnFamilyStore().importNewSSTables(options);
    assertEquals(10, CacheService.instance.rowCache.size());
    it = CacheService.instance.rowCache.keyIterator();
    while (it.hasNext()) {
        // make sure the keys from the sstable we are importing are invalidated and that the other one is still there
        RowCacheKey rck = it.next();
        assertTrue(allCachedKeys.contains(rck));
        assertFalse(keysToInvalidate.contains(rck));
    }
}

/**
 * Import with a null source directory (i.e. pick up sstables already placed in the data
 * directory) while cache invalidation is enabled — must succeed and register one sstable.
 */
@Test
public void testImportCacheEnabledWithoutSrcDir() throws Throwable {
    createTable("create table %s (id int primary key, d int) WITH caching = { 'keys': 'NONE', 'rows_per_partition': 'ALL' }");
    for (int i = 0; i < 10; i++)
        execute("insert into %s (id, d) values (?, ?)", i, i);
    getCurrentColumnFamilyStore().forceBlockingFlush();
    CacheService.instance.setRowCacheCapacityInMB(1);
    getCurrentColumnFamilyStore().clearUnsafe();
    ColumnFamilyStore.ImportOptions options = ColumnFamilyStore.ImportOptions.options(null).invalidateCaches(true).build();
    getCurrentColumnFamilyStore().importNewSSTables(options);
    assertEquals(1, getCurrentColumnFamilyStore().getLiveSSTables().size());
}

// Minimal ColumnFamilyStore subclass that swaps in a custom Directories instance so tests
// can control which data directories the store writes to.
private static class MockCFS extends ColumnFamilyStore {
    public MockCFS(ColumnFamilyStore cfs, Directories dirs) {
        super(cfs.keyspace, cfs.getTableName(), 0, cfs.metadata, dirs, false, false, true);
    }
}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.routing;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;

/**
 * {@link IndexShardRoutingTable} encapsulates all instances of a single shard.
 * Each Elasticsearch index consists of multiple shards, each shard encapsulates
 * a disjoint set of the index data and each shard has one or more instances
 * referred to as replicas of a shard. Given that, this class encapsulates all
 * replicas (instances) for a single index shard.
 */
public class IndexShardRoutingTable implements Iterable<ShardRouting> {

    final ShardShuffler shuffler;
    final ShardId shardId;

    final ShardRouting primary;
    final ImmutableList<ShardRouting> primaryAsList;
    final ImmutableList<ShardRouting> replicas;
    final ImmutableList<ShardRouting> shards;
    final ImmutableList<ShardRouting> activeShards;
    final ImmutableList<ShardRouting> assignedShards;
    final static ImmutableList<ShardRouting> NO_SHARDS = ImmutableList.of();
    final boolean allShardsStarted;

    /**
     * The initializing list, including ones that are initializing on a target node because of relocation.
     * If we can come up with a better variable name, it would be nice...
     */
    final ImmutableList<ShardRouting> allInitializingShards;

    IndexShardRoutingTable(ShardId shardId, List<ShardRouting> shards) {
        this.shardId = shardId;
        this.shuffler = new RotationShardShuffler(ThreadLocalRandom.current().nextInt());
        this.shards = ImmutableList.copyOf(shards);

        ShardRouting primary = null;
        ImmutableList.Builder<ShardRouting> replicas = ImmutableList.builder();
        ImmutableList.Builder<ShardRouting> activeShards = ImmutableList.builder();
        ImmutableList.Builder<ShardRouting> assignedShards = ImmutableList.builder();
        ImmutableList.Builder<ShardRouting> allInitializingShards = ImmutableList.builder();
        boolean allShardsStarted = true;
        // Classify each routing entry once; a shard may appear in several of the derived lists.
        for (ShardRouting shard : shards) {
            if (shard.primary()) {
                primary = shard;
            } else {
                replicas.add(shard);
            }
            if (shard.active()) {
                activeShards.add(shard);
            }
            if (shard.initializing()) {
                allInitializingShards.add(shard);
            }
            if (shard.relocating()) {
                // create the target initializing shard routing on the node the shard is relocating to
                allInitializingShards.add(shard.buildTargetRelocatingShard());
            }
            if (shard.assignedToNode()) {
                assignedShards.add(shard);
            }
            if (shard.state() != ShardRoutingState.STARTED) {
                allShardsStarted = false;
            }
        }
        this.allShardsStarted = allShardsStarted;

        this.primary = primary;
        if (primary != null) {
            this.primaryAsList = ImmutableList.of(primary);
        } else {
            this.primaryAsList = ImmutableList.of();
        }
        this.replicas = replicas.build();
        this.activeShards = activeShards.build();
        this.assignedShards = assignedShards.build();
        this.allInitializingShards = allInitializingShards.build();
    }

    /**
     * Normalizes all shard routings to the same version.
     */
    public IndexShardRoutingTable normalizeVersions() {
        if (shards.isEmpty()) {
            return this;
        }
        if (shards.size() == 1) {
            return this;
        }
        long highestVersion = shards.get(0).version();
        boolean requiresNormalization = false;
        for (int i = 1; i < shards.size(); i++) {
            if (shards.get(i).version() != highestVersion) {
                requiresNormalization = true;
            }
            if (shards.get(i).version() > highestVersion) {
                highestVersion = shards.get(i).version();
            }
        }
        if (!requiresNormalization) {
            return this;
        }
        List<ShardRouting> shardRoutings = new ArrayList<>(shards.size());
        for (int i = 0; i < shards.size(); i++) {
            if (shards.get(i).version() == highestVersion) {
                shardRoutings.add(shards.get(i));
            } else {
                shardRoutings.add(new ShardRouting(shards.get(i), highestVersion));
            }
        }
        return new IndexShardRoutingTable(shardId, ImmutableList.copyOf(shardRoutings));
    }

    /**
     * Returns the shards id
     *
     * @return id of the shard
     */
    public ShardId shardId() {
        return shardId;
    }

    /**
     * Returns the shards id
     *
     * @return id of the shard
     */
    public ShardId getShardId() {
        return shardId();
    }

    @Override
    public Iterator<ShardRouting> iterator() {
        return shards.iterator();
    }

    /**
     * Returns the number of this shards instances.
     */
    public int size() {
        return shards.size();
    }

    /**
     * Returns the number of this shards instances.
     */
    public int getSize() {
        return size();
    }

    /**
     * Returns a {@link List} of shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> shards() {
        return this.shards;
    }

    /**
     * Returns a {@link List} of shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> getShards() {
        return shards();
    }

    /**
     * Returns a {@link List} of active shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> activeShards() {
        return this.activeShards;
    }

    /**
     * Returns a {@link List} of active shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> getActiveShards() {
        return activeShards();
    }

    /**
     * Returns a {@link List} of assigned shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> assignedShards() {
        return this.assignedShards;
    }

    /**
     * Returns a {@link List} of assigned shards
     *
     * @return a {@link List} of shards
     */
    public List<ShardRouting> getAssignedShards() {
        // delegate like every other getXxx() accessor in this class (was returning the field directly)
        return assignedShards();
    }

    /** Returns an iterator over all shards in a freshly-shuffled order. */
    public ShardIterator shardsRandomIt() {
        return new PlainShardIterator(shardId, shuffler.shuffle(shards));
    }

    /** Returns an iterator over all shards in their stored order. */
    public ShardIterator shardsIt() {
        return new PlainShardIterator(shardId, shards);
    }

    /** Returns an iterator over all shards, shuffled deterministically by {@code seed}. */
    public ShardIterator shardsIt(int seed) {
        return new PlainShardIterator(shardId, shuffler.shuffle(shards, seed));
    }

    /**
     * Returns an iterator over active and initializing shards. Making sure though that
     * its random within the active shards, and initializing shards are the last to iterate through.
     */
    public ShardIterator activeInitializingShardsRandomIt() {
        return activeInitializingShardsIt(shuffler.nextSeed());
    }

    /**
     * Returns an iterator over active and initializing shards. Making sure though that
     * its random within the active shards, and initializing shards are the last to iterate through.
     */
    public ShardIterator activeInitializingShardsIt(int seed) {
        if (allInitializingShards.isEmpty()) {
            return new PlainShardIterator(shardId, shuffler.shuffle(activeShards, seed));
        }
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        ordered.addAll(shuffler.shuffle(activeShards, seed));
        ordered.addAll(allInitializingShards);
        return new PlainShardIterator(shardId, ordered);
    }

    /**
     * Returns true if no primaries are active or initializing for this shard
     */
    private boolean noPrimariesActive() {
        // NOTE(review): when primaryAsList is empty (no primary at all) this returns false,
        // i.e. it reports a primary as active/initializing — confirm whether that is intended
        // before changing; callers may rely on the current behavior.
        if (!primaryAsList.isEmpty() && !primaryAsList.get(0).active() && !primaryAsList.get(0).initializing()) {
            return true;
        }
        return false;
    }

    /**
     * Returns an iterator only on the primary shard.
     */
    public ShardIterator primaryShardIt() {
        return new PlainShardIterator(shardId, primaryAsList);
    }

    /** Returns an iterator over the primary shard, or an empty iterator if it is neither active nor initializing. */
    public ShardIterator primaryActiveInitializingShardIt() {
        if (noPrimariesActive()) {
            return new PlainShardIterator(shardId, NO_SHARDS);
        }
        return primaryShardIt();
    }

    /** Returns active shards shuffled with the primary moved to the front, followed by initializing shards. */
    public ShardIterator primaryFirstActiveInitializingShardsIt() {
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        // fill it in a randomized fashion
        for (ShardRouting shardRouting : shuffler.shuffle(activeShards)) {
            ordered.add(shardRouting);
            if (shardRouting.primary()) {
                // swap the primary to the front of the list
                ordered.set(ordered.size() - 1, ordered.get(0));
                ordered.set(0, shardRouting);
            }
        }
        // no need to worry about primary first here..., its temporal
        if (!allInitializingShards.isEmpty()) {
            ordered.addAll(allInitializingShards);
        }
        return new PlainShardIterator(shardId, ordered);
    }

    /** Returns replicas only: active replicas (shuffled) first, initializing replicas last. */
    public ShardIterator replicaActiveInitializingShardIt() {
        // If the primaries are unassigned, return an empty list (there aren't
        // any replicas to query anyway)
        if (noPrimariesActive()) {
            return new PlainShardIterator(shardId, NO_SHARDS);
        }

        LinkedList<ShardRouting> ordered = new LinkedList<>();
        for (ShardRouting replica : shuffler.shuffle(replicas)) {
            if (replica.active()) {
                ordered.addFirst(replica);
            } else if (replica.initializing()) {
                ordered.addLast(replica);
            }
        }
        return new PlainShardIterator(shardId, ordered);
    }

    /** Returns active replicas (shuffled) first, then the primary, then initializing shards. */
    public ShardIterator replicaFirstActiveInitializingShardsIt() {
        // If the primaries are unassigned, return an empty list (there aren't
        // any replicas to query anyway)
        if (noPrimariesActive()) {
            return new PlainShardIterator(shardId, NO_SHARDS);
        }

        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        // fill it in a randomized fashion with the active replicas
        for (ShardRouting replica : shuffler.shuffle(replicas)) {
            if (replica.active()) {
                ordered.add(replica);
            }
        }

        // Add the primary shard
        ordered.add(primary);

        // Add initializing shards last
        if (!allInitializingShards.isEmpty()) {
            ordered.addAll(allInitializingShards);
        }
        return new PlainShardIterator(shardId, ordered);
    }

    /** Returns only the active/initializing shards currently located on the given node. */
    public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) {
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        // fill it in a randomized fashion
        for (int i = 0; i < activeShards.size(); i++) {
            ShardRouting shardRouting = activeShards.get(i);
            if (nodeId.equals(shardRouting.currentNodeId())) {
                ordered.add(shardRouting);
            }
        }
        for (int i = 0; i < allInitializingShards.size(); i++) {
            ShardRouting shardRouting = allInitializingShards.get(i);
            if (nodeId.equals(shardRouting.currentNodeId())) {
                ordered.add(shardRouting);
            }
        }
        return new PlainShardIterator(shardId, ordered);
    }

    /**
     * Returns shards based on nodeAttributes given such as node name , node attribute, node IP
     * Supports node specifications in cluster API
     *
     * @param nodeAttribute  node selector spec (name, attribute or IP) resolved via the cluster API
     * @param discoveryNodes current cluster nodes used to resolve the selector
     */
    public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String nodeAttribute, DiscoveryNodes discoveryNodes) {
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        Set<String> selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodesIds(nodeAttribute));

        for (ShardRouting shardRouting : activeShards) {
            if (selectedNodes.contains(shardRouting.currentNodeId())) {
                ordered.add(shardRouting);
            }
        }
        for (ShardRouting shardRouting : allInitializingShards) {
            if (selectedNodes.contains(shardRouting.currentNodeId())) {
                ordered.add(shardRouting);
            }
        }
        if (ordered.isEmpty()) {
            // fixed typo in the error message ("critera" -> "criteria")
            throw new IllegalArgumentException("No data node with criteria [" + nodeAttribute + "] found");
        }
        return new PlainShardIterator(shardId, ordered);
    }

    /** Returns active shards shuffled, with any shard on {@code nodeId} moved to the front, then initializing shards. */
    public ShardIterator preferNodeActiveInitializingShardsIt(String nodeId) {
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
        // fill it in a randomized fashion
        for (ShardRouting shardRouting : shuffler.shuffle(activeShards)) {
            ordered.add(shardRouting);
            if (nodeId.equals(shardRouting.currentNodeId())) {
                // switch, its the matching node id
                ordered.set(ordered.size() - 1, ordered.get(0));
                ordered.set(0, shardRouting);
            }
        }
        if (!allInitializingShards.isEmpty()) {
            ordered.addAll(allInitializingShards);
        }
        return new PlainShardIterator(shardId, ordered);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        IndexShardRoutingTable that = (IndexShardRoutingTable) o;

        if (!shardId.equals(that.shardId)) return false;
        if (!shards.equals(that.shards)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = shardId.hashCode();
        result = 31 * result + shards.hashCode();
        return result;
    }

    /**
     * Returns <code>true</code> iff all shards in the routing table are started otherwise <code>false</code>
     */
    public boolean allShardsStarted() {
        return allShardsStarted;
    }

    /** Hashable wrapper so an attribute-name array can be used as a cache key. */
    static class AttributesKey {

        final String[] attributes;

        AttributesKey(String[] attributes) {
            this.attributes = attributes;
        }

        @Override
        public int hashCode() {
            return Arrays.hashCode(attributes);
        }

        @Override
        public boolean equals(Object obj) {
            return Arrays.equals(attributes, ((AttributesKey) obj).attributes);
        }
    }

    /** Shards partitioned by whether they share the local node's attribute values. */
    static class AttributesRoutings {

        public final ImmutableList<ShardRouting> withSameAttribute;
        public final ImmutableList<ShardRouting> withoutSameAttribute;
        public final int totalSize;

        AttributesRoutings(ImmutableList<ShardRouting> withSameAttribute, ImmutableList<ShardRouting> withoutSameAttribute) {
            this.withSameAttribute = withSameAttribute;
            this.withoutSameAttribute = withoutSameAttribute;
            this.totalSize = withoutSameAttribute.size() + withSameAttribute.size();
        }
    }

    private volatile Map<AttributesKey, AttributesRoutings> activeShardsByAttributes = ImmutableMap.of();
    private volatile Map<AttributesKey, AttributesRoutings> initializingShardsByAttributes = ImmutableMap.of();
    private final Object shardsByAttributeMutex = new Object();

    // Lazily computes and caches the attribute partitioning of the active shards for the given key.
    private AttributesRoutings getActiveAttribute(AttributesKey key, DiscoveryNodes nodes) {
        AttributesRoutings shardRoutings = activeShardsByAttributes.get(key);
        if (shardRoutings == null) {
            synchronized (shardsByAttributeMutex) {
                ArrayList<ShardRouting> from = new ArrayList<>(activeShards);
                ImmutableList<ShardRouting> to = collectAttributeShards(key, nodes, from);

                shardRoutings = new AttributesRoutings(to, ImmutableList.copyOf(from));
                activeShardsByAttributes = MapBuilder.newMapBuilder(activeShardsByAttributes).put(key, shardRoutings).immutableMap();
            }
        }
        return shardRoutings;
    }

    // Lazily computes and caches the attribute partitioning of the initializing shards for the given key.
    private AttributesRoutings getInitializingAttribute(AttributesKey key, DiscoveryNodes nodes) {
        AttributesRoutings shardRoutings = initializingShardsByAttributes.get(key);
        if (shardRoutings == null) {
            synchronized (shardsByAttributeMutex) {
                ArrayList<ShardRouting> from = new ArrayList<>(allInitializingShards);
                ImmutableList<ShardRouting> to = collectAttributeShards(key, nodes, from);
                shardRoutings = new AttributesRoutings(to, ImmutableList.copyOf(from));
                initializingShardsByAttributes = MapBuilder.newMapBuilder(initializingShardsByAttributes).put(key, shardRoutings).immutableMap();
            }
        }
        return shardRoutings;
    }

    // Moves shards whose node shares the local node's attribute values from `from` into the returned
    // list; shards on vanished nodes are dropped from `from`. Mutates `from` in place.
    private static ImmutableList<ShardRouting> collectAttributeShards(AttributesKey key, DiscoveryNodes nodes, ArrayList<ShardRouting> from) {
        final ArrayList<ShardRouting> to = new ArrayList<>();
        for (final String attribute : key.attributes) {
            final String localAttributeValue = nodes.localNode().attributes().get(attribute);
            if (localAttributeValue != null) {
                for (Iterator<ShardRouting> iterator = from.iterator(); iterator.hasNext(); ) {
                    ShardRouting fromShard = iterator.next();
                    final DiscoveryNode discoveryNode = nodes.get(fromShard.currentNodeId());
                    if (discoveryNode == null) {
                        iterator.remove(); // node is not present anymore - ignore shard
                    } else if (localAttributeValue.equals(discoveryNode.attributes().get(attribute))) {
                        iterator.remove();
                        to.add(fromShard);
                    }
                }
            }
        }
        return ImmutableList.copyOf(to);
    }

    /** Like {@link #preferAttributesActiveInitializingShardsIt(String[], DiscoveryNodes, int)} with a fresh seed. */
    public ShardIterator preferAttributesActiveInitializingShardsIt(String[] attributes, DiscoveryNodes nodes) {
        return preferAttributesActiveInitializingShardsIt(attributes, nodes, shuffler.nextSeed());
    }

    /** Orders shards so those matching the local node's attributes come first, actives before initializing. */
    public ShardIterator preferAttributesActiveInitializingShardsIt(String[] attributes, DiscoveryNodes nodes, int seed) {
        AttributesKey key = new AttributesKey(attributes);
        AttributesRoutings activeRoutings = getActiveAttribute(key, nodes);
        AttributesRoutings initializingRoutings = getInitializingAttribute(key, nodes);

        // we now randomize, once between the ones that have the same attributes, and once for the ones that don't
        // we don't want to mix between the two!
        ArrayList<ShardRouting> ordered = new ArrayList<>(activeRoutings.totalSize + initializingRoutings.totalSize);
        ordered.addAll(shuffler.shuffle(activeRoutings.withSameAttribute, seed));
        ordered.addAll(shuffler.shuffle(activeRoutings.withoutSameAttribute, seed));
        ordered.addAll(shuffler.shuffle(initializingRoutings.withSameAttribute, seed));
        ordered.addAll(shuffler.shuffle(initializingRoutings.withoutSameAttribute, seed));
        return new PlainShardIterator(shardId, ordered);
    }

    /** Returns the primary shard routing, or null if there is none. */
    public ShardRouting primaryShard() {
        return primary;
    }

    /** Returns all replica (non-primary) shard routings. */
    public List<ShardRouting> replicaShards() {
        return this.replicas;
    }

    /** Returns the replicas currently in any of the given states. */
    public List<ShardRouting> replicaShardsWithState(ShardRoutingState... states) {
        List<ShardRouting> shards = new ArrayList<>();
        for (ShardRouting shardEntry : replicas) {
            for (ShardRoutingState state : states) {
                if (shardEntry.state() == state) {
                    shards.add(shardEntry);
                }
            }
        }
        return shards;
    }

    /** Returns all shards in the given state (INITIALIZING uses the precomputed list, incl. relocation targets). */
    public List<ShardRouting> shardsWithState(ShardRoutingState state) {
        if (state == ShardRoutingState.INITIALIZING) {
            return allInitializingShards;
        }
        List<ShardRouting> shards = new ArrayList<>();
        for (ShardRouting shardEntry : this) {
            if (shardEntry.state() == state) {
                shards.add(shardEntry);
            }
        }
        return shards;
    }

    /** Builder for {@link IndexShardRoutingTable}, plus the (de)serialization helpers. */
    public static class Builder {

        private ShardId shardId;
        private final List<ShardRouting> shards;

        public Builder(IndexShardRoutingTable indexShard) {
            this.shardId = indexShard.shardId;
            this.shards = new ArrayList<>(indexShard.shards);
        }

        public Builder(ShardId shardId) {
            this.shardId = shardId;
            this.shards = new ArrayList<>();
        }

        public Builder addShard(ShardRouting shardEntry) {
            for (ShardRouting shard : shards) {
                // don't add two that map to the same node id
                // we rely on the fact that a node does not have primary and backup of the same shard
                if (shard.assignedToNode() && shardEntry.assignedToNode()
                        && shard.currentNodeId().equals(shardEntry.currentNodeId())) {
                    return this;
                }
            }
            shards.add(shardEntry);
            return this;
        }

        public Builder removeShard(ShardRouting shardEntry) {
            shards.remove(shardEntry);
            return this;
        }

        public IndexShardRoutingTable build() {
            return new IndexShardRoutingTable(shardId, ImmutableList.copyOf(shards));
        }

        public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException {
            String index = in.readString();
            return readFromThin(in, index);
        }

        public static IndexShardRoutingTable readFromThin(StreamInput in, String index) throws IOException {
            int iShardId = in.readVInt();
            Builder builder = new Builder(new ShardId(index, iShardId));

            int size = in.readVInt();
            for (int i = 0; i < size; i++) {
                ShardRouting shard = ShardRouting.readShardRoutingEntry(in, index, iShardId);
                builder.addShard(shard);
            }

            return builder.build();
        }

        public static void writeTo(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
            out.writeString(indexShard.shardId().index().name());
            writeToThin(indexShard, out);
        }

        public static void writeToThin(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
            out.writeVInt(indexShard.shardId.id());
            out.writeVInt(indexShard.shards.size());
            for (ShardRouting entry : indexShard) {
                entry.writeToThin(out);
            }
        }
    }
}
/**
 * Copyright 2011, Big Switch Networks, Inc.
 * Originally created by David Erickson, Stanford University
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 **/

package net.floodlightcontroller.core.test;

import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import net.dsc.cluster.HAListenerTypeMarker;
import net.dsc.cluster.HARole;
import net.dsc.cluster.RoleInfo;
import net.dsc.cluster.model.ControllerModel;
import net.dsc.hazelcast.listener.IHAListener;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IInfoProvider;
import net.floodlightcontroller.core.IListener.Command;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.internal.Controller.IUpdate;
import net.floodlightcontroller.core.internal.Controller.ModuleLoaderState;
import net.floodlightcontroller.core.internal.RoleManager;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.core.util.ListenerDispatcher;
import net.floodlightcontroller.packet.Ethernet;

import org.jboss.netty.util.Timer;
import org.projectfloodlight.openflow.protocol.OFMessage;
import org.projectfloodlight.openflow.protocol.OFPacketIn;
import org.projectfloodlight.openflow.protocol.OFType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Mock {@link IFloodlightProviderService} used by unit tests: it keeps listener
 * registration and message dispatch in-process (optionally on a single-threaded
 * executor) and stubs out everything else.
 *
 * @author David Erickson (daviderickson@cs.stanford.edu)
 */
public class MockFloodlightProvider implements IFloodlightModule, IFloodlightProviderService {
    private final static Logger log = LoggerFactory.getLogger(MockFloodlightProvider.class);
    // Message listeners keyed by OpenFlow message type.
    protected ConcurrentMap<OFType, ListenerDispatcher<OFType, IOFMessageListener>> listeners;
    protected ListenerDispatcher<HAListenerTypeMarker, IHAListener> haListeners;
    private HARole role;
    private final String openFlowHostname = "127.0.0.1";
    private final int openFlowPort = 6653;
    // When true, updates queued via addUpdateToQueue run on a single-thread executor
    // instead of inline, mimicking the real controller's async update pipeline.
    private final boolean useAsyncUpdates;
    private volatile ExecutorService executorService;
    private volatile Future<?> mostRecentUpdateFuture;

    /**
     * @param useAsyncUpdates whether queued updates are dispatched asynchronously
     */
    public MockFloodlightProvider(boolean useAsyncUpdates) {
        listeners = new ConcurrentHashMap<OFType, ListenerDispatcher<OFType, IOFMessageListener>>();
        haListeners = new ListenerDispatcher<HAListenerTypeMarker, IHAListener>();
        role = null;
        this.useAsyncUpdates = useAsyncUpdates;
    }

    /** Creates a provider that dispatches updates synchronously. */
    public MockFloodlightProvider() {
        this(false);
    }

    @Override
    public synchronized void addOFMessageListener(OFType type, IOFMessageListener listener) {
        ListenerDispatcher<OFType, IOFMessageListener> ldd = listeners.get(type);
        if (ldd == null) {
            ldd = new ListenerDispatcher<OFType, IOFMessageListener>();
            listeners.put(type, ldd);
        }
        ldd.addListener(type, listener);
    }

    @Override
    public synchronized void removeOFMessageListener(OFType type, IOFMessageListener listener) {
        ListenerDispatcher<OFType, IOFMessageListener> ldd = listeners.get(type);
        if (ldd != null) {
            ldd.removeListener(listener);
        }
    }

    /**
     * @return the listeners, as an unmodifiable per-type snapshot
     */
    @Override
    public Map<OFType, List<IOFMessageListener>> getListeners() {
        Map<OFType, List<IOFMessageListener>> lers = new HashMap<OFType, List<IOFMessageListener>>();
        for (Entry<OFType, ListenerDispatcher<OFType, IOFMessageListener>> e : listeners.entrySet()) {
            lers.put(e.getKey(), e.getValue().getOrderedListeners());
        }
        return Collections.unmodifiableMap(lers);
    }

    /** Removes all registered OpenFlow message listeners. */
    public void clearListeners() {
        this.listeners.clear();
    }

    /** Dispatches a message with a fresh FloodlightContext. */
    public void dispatchMessage(IOFSwitch sw, OFMessage msg) {
        dispatchMessage(sw, msg, new FloodlightContext());
    }

    /**
     * Dispatches {@code msg} through the ordered listener chain for its type until a
     * listener returns {@link Command#STOP}. PACKET_IN payloads are deserialized into
     * the context first so listeners can read the Ethernet frame.
     */
    public void dispatchMessage(IOFSwitch sw, OFMessage msg, FloodlightContext bc) {
        // FIX: look up the dispatcher and null-check it before dereferencing. The previous
        // code called listeners.get(type).getOrderedListeners() and only null-checked the
        // result, so dispatching a type with no registered dispatcher threw an NPE before
        // the check could run (the sibling handleOutgoingMessage guards correctly).
        ListenerDispatcher<OFType, IOFMessageListener> ldd = listeners.get(msg.getType());
        if (ldd == null) {
            return;
        }
        List<IOFMessageListener> theListeners = ldd.getOrderedListeners();
        if (theListeners != null) {
            Command result = Command.CONTINUE;
            Iterator<IOFMessageListener> it = theListeners.iterator();
            if (OFType.PACKET_IN.equals(msg.getType())) {
                OFPacketIn pi = (OFPacketIn) msg;
                Ethernet eth = new Ethernet();
                eth.deserialize(pi.getData(), 0, pi.getData().length);
                IFloodlightProviderService.bcStore.put(bc, IFloodlightProviderService.CONTEXT_PI_PAYLOAD, eth);
            }
            while (it.hasNext() && !Command.STOP.equals(result)) {
                result = it.next().receive(sw, msg, bc);
            }
        }
    }

    @Override
    public void handleOutgoingMessage(IOFSwitch sw, OFMessage m) {
        FloodlightContext bc = new FloodlightContext();
        List<IOFMessageListener> msgListeners = null;
        if (listeners.containsKey(m.getType())) {
            msgListeners = listeners.get(m.getType()).getOrderedListeners();
        }
        if (msgListeners != null) {
            for (IOFMessageListener listener : msgListeners) {
                if (Command.STOP.equals(listener.receive(sw, m, bc))) {
                    break;
                }
            }
        }
    }

    /** Dispatches each outgoing message in order; the shared context is currently unused. */
    public void handleOutgoingMessages(IOFSwitch sw, List<OFMessage> msglist, FloodlightContext bc) {
        for (OFMessage m : msglist) {
            handleOutgoingMessage(sw, m);
        }
    }

    @Override
    public void run() {
        logListeners();
        if (useAsyncUpdates)
            executorService = Executors.newSingleThreadExecutor();
    }

    /** Stops the async update executor, if one was started. */
    public void shutdown() {
        if (executorService != null) {
            executorService.shutdownNow();
            executorService = null;
            mostRecentUpdateFuture = null;
        }
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        Collection<Class<? extends IFloodlightService>> services = new ArrayList<Class<? extends IFloodlightService>>(1);
        services.add(IFloodlightProviderService.class);
        return services;
    }

    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        m.put(IFloodlightProviderService.class, this);
        return m;
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        return null;
    }

    @Override
    public void init(FloodlightModuleContext context) throws FloodlightModuleException {
        // do nothing.
    }

    @Override
    public void startUp(FloodlightModuleContext context) {
        // do nothing.
    }

    @Override
    public void addInfoProvider(String type, IInfoProvider provider) {
        // do nothing.
    }

    @Override
    public void removeInfoProvider(String type, IInfoProvider provider) {
        // do nothing.
    }

    @Override
    public Map<String, Object> getControllerInfo(String type) {
        // mock up something
        Map<String, Object> summary = new HashMap<String, Object>();
        summary.put("test-summary-1", 2);
        summary.put("test-summary-2", 5);
        return summary;
    }

    @Override
    public void addUpdateToQueue(final IUpdate update) {
        if (useAsyncUpdates) {
            mostRecentUpdateFuture = executorService.submit(new Runnable() {
                @Override
                public void run() {
                    update.dispatch();
                }
            });
        } else {
            update.dispatch();
        }
    }

    /**
     * Polls until the most recently queued async update completes, failing the test
     * if it does not finish within the given timeout.
     */
    public void waitForUpdates(long timeout, TimeUnit unit) throws InterruptedException {
        long timeoutNanos = unit.toNanos(timeout);
        long start = System.nanoTime();
        for (;;) {
            Future<?> future = mostRecentUpdateFuture;
            if ((future == null) || future.isDone())
                break;
            Thread.sleep(100);
            long now = System.nanoTime();
            // FIX: nanoTime values may wrap; per the System.nanoTime javadoc elapsed time
            // must be compared via subtraction, not "now > start + timeoutNanos".
            if (now - start > timeoutNanos) {
                fail("Timeout waiting for update tasks to complete");
            }
        }
    }

    @Override
    public void addHAListener(IHAListener listener) {
        haListeners.addListener(null, listener);
    }

    @Override
    public void removeHAListener(IHAListener listener) {
        haListeners.removeListener(listener);
    }

    @Override
    public HARole getRole() {
        /* DISABLE THIS CHECK FOR NOW. OTHER UNIT TESTS NEED TO BE UPDATED
         * FIRST
        if (this.role == null)
            throw new IllegalStateException("You need to call setRole on " +
                       "MockFloodlightProvider before calling startUp on " +
                       "other modules");
        */
        return this.role;
    }

    @Override
    public void setRole(HARole role, String roleChangeDescription) {
        this.role = role;
    }

    /**
     * Dispatches a transition-to-active notification to all HA listeners via the
     * update queue.
     */
    public void transitionToActive() {
        IUpdate update = new IUpdate() {
            @Override
            public void dispatch() {
                for (IHAListener rl : haListeners.getOrderedListeners()) {
                    rl.transitionToActive();
                }
            }
        };
        addUpdateToQueue(update);
    }

    @Override
    public Map<String, String> getControllerNodeIPs() {
        return null;
    }

    @Override
    public long getSystemStartTime() {
        return 0;
    }

    // Debug-logs the ordered listener chain registered for each OF message type.
    private void logListeners() {
        for (Map.Entry<OFType, ListenerDispatcher<OFType, IOFMessageListener>> entry : listeners.entrySet()) {
            OFType type = entry.getKey();
            ListenerDispatcher<OFType, IOFMessageListener> ldd = entry.getValue();
            // StringBuilder: purely method-local, no synchronization needed (was StringBuffer).
            StringBuilder sb = new StringBuilder();
            sb.append("OFListeners for ");
            sb.append(type);
            sb.append(": ");
            for (IOFMessageListener l : ldd.getOrderedListeners()) {
                sb.append(l.getName());
                sb.append(",");
            }
            log.debug(sb.toString());
        }
    }

    @Override
    public RoleInfo getRoleInfo() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Map<String, Long> getMemory() {
        Map<String, Long> m = new HashMap<String, Long>();
        m.put("total", 1000000000L);
        m.put("free", 20000000L);
        return m;
    }

    @Override
    public Long getUptime() {
        return 1000000L;
    }

    @Override
    public String getOFHostname() {
        return openFlowHostname;
    }

    @Override
    public int getOFPort() {
        return openFlowPort;
    }

    @Override
    public void handleMessage(IOFSwitch sw, OFMessage m, FloodlightContext bContext) {
        // do nothing
    }

    @Override
    public Timer getTimer() {
        return null;
    }

    @Override
    public RoleManager getRoleManager() {
        return null;
    }

    @Override
    public ModuleLoaderState getModuleLoaderState() {
        return null;
    }

    @Override
    public ControllerModel getControllerModel() {
        return null;
    }

    @Override
    public Set<String> getUplinkPortPrefixSet() {
        return null;
    }

    @Override
    public int getWorkerThreads() {
        return 0;
    }
}
package net.ros.client; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.inventory.GuiContainer; import net.minecraft.client.renderer.block.model.IBakedModel; import net.minecraft.client.renderer.block.model.ModelResourceLocation; import net.minecraft.client.renderer.block.statemap.StateMapperBase; import net.minecraft.item.Item; import net.minecraft.util.ResourceLocation; import net.minecraftforge.client.event.GuiScreenEvent; import net.minecraftforge.client.event.ModelBakeEvent; import net.minecraftforge.client.event.ModelRegistryEvent; import net.minecraftforge.client.event.TextureStitchEvent; import net.minecraftforge.client.model.ModelLoader; import net.minecraftforge.client.model.ModelLoaderRegistry; import net.minecraftforge.client.model.obj.OBJLoader; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fml.client.registry.ClientRegistry; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import net.ros.RiseOfSteam; import net.ros.client.render.BlueprintRender; import net.ros.client.render.model.obj.ROSOBJLoader; import net.ros.client.render.model.obj.RetextureData; import net.ros.client.render.tile.*; import net.ros.common.CommonProxy; import net.ros.common.ROSConstants; import net.ros.common.block.IModelProvider; import net.ros.common.init.ROSBlocks; import net.ros.common.init.ROSFluids; import net.ros.common.init.ROSItems; import net.ros.common.item.IItemModelProvider; import net.ros.common.network.MultiblockBoxPacket; import net.ros.common.tile.TileFluidGauge; import net.ros.common.tile.TileStructure; import 
net.ros.common.tile.machine.*;
import net.voxelindustry.brokkgui.style.StylesheetManager;
import org.lwjgl.input.Mouse;

import java.util.function.BiConsumer;

/**
 * Client-side proxy: performs every rendering-related registration for the mod
 * (OBJ model loaders, retextured model variants, tile-entity special renderers,
 * item/fluid models, extra texture sprites) and client-only event handling.
 * Loaded only on the physical client ({@code @SideOnly(Side.CLIENT)}).
 */
@SideOnly(Side.CLIENT)
public class ClientProxy extends CommonProxy
{
    // Created in onModelRegistry and reused in onModelBake; owns pipe block models.
    private PipeModelRegistry pipeModelRegistry;

    /**
     * Pre-init: hooks the vanilla and custom OBJ loaders for this mod's domain,
     * registers this proxy on the event bus, then declares every retextured model
     * variant (animated belts, pipe/valve material variants) so they are baked
     * alongside their source meshes.
     *
     * @param e the Forge pre-init event, forwarded to {@link CommonProxy#preInit}
     */
    @Override
    public void preInit(final FMLPreInitializationEvent e)
    {
        OBJLoader.INSTANCE.addDomain(ROSConstants.MODID);
        ModelLoaderRegistry.registerLoader(ROSOBJLoader.INSTANCE);
        ROSOBJLoader.INSTANCE.addDomain(ROSConstants.MODID);
        MinecraftForge.EVENT_BUS.register(this);

        super.preInit(e);

        // Animated belt: reuses the belt.mwm mesh with an animated top texture.
        ROSOBJLoader.INSTANCE.addRetexturedModel("_belt_animated.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/belt.mwm"))
                .texture("Top", "ros:blocks/belt_top_anim")
                .build());
        // All slope variants reuse the belt_slope_up.mwm mesh, swapping only the texture.
        ROSOBJLoader.INSTANCE.addRetexturedModel("_belt_slope_down.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/belt_slope_up.mwm"))
                .texture("None", "ros:blocks/belt_slope_down")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_belt_slope_up_animated.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/belt_slope_up.mwm"))
                .texture("None", "ros:blocks/belt_slope_up_anim")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_belt_slope_down_animated.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/belt_slope_up.mwm"))
                .texture("None", "ros:blocks/belt_slope_down_anim")
                .build());
        // Fluid valves reuse the steam valve meshes with fluid textures.
        ROSOBJLoader.INSTANCE.addRetexturedModel("_fluidvalve_small.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/steamvalve_small.mwm"))
                .texture("None", "ros:blocks/fluidvalve_small")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_fluidvalve_medium.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/steamvalve_medium.mwm"))
                .texture("None", "ros:blocks/fluidvalve_medium")
                .build());
        // Cast-iron fluid pipes reuse the iron pipe meshes.
        ROSOBJLoader.INSTANCE.addRetexturedModel("_fluidpipe_cast_iron_small.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/fluidpipe_iron_small.mwm"))
                .texture("None", "ros:blocks/fluidpipe_cast_iron_small")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_fluidpipe_cast_iron_medium.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/fluidpipe_iron_medium.mwm"))
                .texture("None", "ros:blocks/fluidpipe_cast_iron_medium")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_fluidpipe_cast_iron_large.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/fluidpipe_iron_large.mwm"))
                .texture("None", "ros:blocks/fluidpipe_cast_iron_large")
                .build());
        // Steel steam pipes reuse the brass pipe meshes.
        ROSOBJLoader.INSTANCE.addRetexturedModel("_steampipe_steel_small.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/steampipe_brass_small.mwm"))
                .texture("None", "ros:blocks/steampipe_steel_small")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_steampipe_steel_medium.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/steampipe_brass_medium.mwm"))
                .texture("None", "ros:blocks/steampipe_steel_medium")
                .build());
        ROSOBJLoader.INSTANCE.addRetexturedModel("_steampipe_steel_large.mwm", RetextureData.builder()
                .originalModel(new ResourceLocation(ROSConstants.MODID + ":block/steampipe_brass_large.mwm"))
                .texture("None", "ros:blocks/steampipe_steel_large")
                .build());

        MinecraftForge.EVENT_BUS.register(new ClientTickHandler());

        // Let items with custom variants declare them before model registration fires.
        ROSItems.ITEMS.stream().filter(item -> item instanceof IItemModelProvider)
                .forEach(item -> ((IItemModelProvider) item).registerVariants());
    }

    /**
     * Init: binds each tile entity class to its special renderer (TESR) and
     * registers the remaining client-only event handlers.
     *
     * @param e the Forge init event, forwarded to {@link CommonProxy#init}
     */
    @Override
    public void init(final FMLInitializationEvent e)
    {
        super.init(e);

        ClientRegistry.bindTileEntitySpecialRenderer(TileBelt.class, new RenderBelt());
        ClientRegistry.bindTileEntitySpecialRenderer(TileStructure.class, new RenderStructure());
        ClientRegistry.bindTileEntitySpecialRenderer(TileRollingMill.class, new RenderRollingMill());
        ClientRegistry.bindTileEntitySpecialRenderer(TileSteamFurnaceMK2.class, new RenderSteamFurnaceMK2());
        ClientRegistry.bindTileEntitySpecialRenderer(TileSawMill.class, new RenderSawMill());
        ClientRegistry.bindTileEntitySpecialRenderer(TileTank.class, new RenderFluidTank());
        ClientRegistry.bindTileEntitySpecialRenderer(TileFluidGauge.class, new RenderFluidGauge());

        MinecraftForge.EVENT_BUS.register(new MachineClientEventHandler());
        MinecraftForge.EVENT_BUS.register(new LogisticClientEventHandler());
        MinecraftForge.EVENT_BUS.register(new ClientEventHandler());
    }

    /**
     * Post-init: installs the mod's GUI stylesheet into the BrokkGui style engine.
     *
     * @param e the Forge post-init event, forwarded to {@link CommonProxy#postInit}
     */
    @Override
    public void postInit(FMLPostInitializationEvent e)
    {
        super.postInit(e);

        StylesheetManager.getInstance().addUserAgent(ROSConstants.MODID, "/assets/ros/css/theme.css");
    }

    /**
     * Binds the default "inventory" variant model to the given item/metadata pair.
     *
     * @param item the item to register a model for
     * @param meta the metadata value the model applies to
     */
    @Override
    public void registerItemRenderer(final Item item, final int meta)
    {
        ModelLoader.setCustomModelResourceLocation(item, meta,
                new ModelResourceLocation(item.getRegistryName(), "inventory"));
    }

    /**
     * Maps every registered fluid block to a single model resource location keyed
     * by the fluid's name, so all blockstates of a fluid share one model.
     */
    private static void registerFluidsClient()
    {
        ROSFluids.FLUIDS.forEach((fluid, blockFluid) ->
        {
            ModelResourceLocation fluidLocation =
                    new ModelResourceLocation(blockFluid.getRegistryName(), fluid.getName());
            ModelLoader.setCustomStateMapper(blockFluid, new StateMapperBase()
            {
                @Override
                protected ModelResourceLocation getModelResourceLocation(final IBlockState state)
                {
                    // Every state of this fluid block resolves to the same model.
                    return fluidLocation;
                }
            });
        });
    }

    /**
     * Model-registry phase: registers fluid models, item models (special or
     * default "inventory" variant), and creates the pipe model registry.
     *
     * @param e the model registry event
     */
    @SubscribeEvent
    public void onModelRegistry(ModelRegistryEvent e)
    {
        ClientProxy.registerFluidsClient();

        for (Item item : ROSItems.ITEMS)
        {
            if (item instanceof IItemModelProvider && ((IItemModelProvider) item).hasSpecialModel())
                ((IItemModelProvider) item).registerModels();
            else
                RiseOfSteam.proxy.registerItemRenderer(item, 0);
        }

        pipeModelRegistry = new PipeModelRegistry();
        pipeModelRegistry.onModelRegistry();
    }

    /**
     * Model-bake phase: wraps the blueprint item model in a custom renderer,
     * bakes pipe models, and binds the metadata/blockstate-specific item models
     * (extractor, splitter, valve, gauge) plus every {@link IModelProvider} block.
     *
     * @param e the model bake event
     */
    @SubscribeEvent
    public void onModelBake(final ModelBakeEvent e)
    {
        // Replace the baked blueprint model with a wrapper that adds custom rendering.
        ModelResourceLocation key =
                new ModelResourceLocation(ROSItems.BLUEPRINT.getRegistryName(), "inventory");
        IBakedModel originalModel = e.getModelRegistry().getObject(key);
        e.getModelRegistry().putObject(key, new BlueprintRender(originalModel));

        pipeModelRegistry.onModelBake(e.getModelRegistry());

        // Meta 1 of extractor/splitter maps to the "filter" blockstate variant.
        ModelLoader.setCustomModelResourceLocation(Item.getByNameOrId("ros:itemextractor"), 1,
                new ModelResourceLocation(ROSConstants.MODID + ":itemextractor", "facing=down,filter=true"));
        ModelLoader.setCustomModelResourceLocation(Item.getByNameOrId("ros:itemsplitter"), 1,
                new ModelResourceLocation(ROSConstants.MODID + ":itemsplitter", "facing=up,filter=true"));
        ModelLoader.setCustomModelResourceLocation(ROSItems.VALVE, 0, new ModelResourceLocation(
                ROSConstants.MODID + ":itemvalve", "inventory"));
        ModelLoader.setCustomModelResourceLocation(ROSItems.GAUGE, 0, new ModelResourceLocation(
                ROSConstants.MODID + ":itemgauge", "inventory"));

        // Blocks that provide their own item models register one model per metadata.
        ROSBlocks.BLOCKS.keySet().stream().filter(IModelProvider.class::isInstance).forEach(block ->
        {
            IModelProvider modelProvider = (IModelProvider) block;
            BiConsumer<Integer, Block> modelRegister = modelProvider.registerItemModels();

            for (int i = 0; i < modelProvider.getItemModelCount(); i++)
                modelRegister.accept(i, block);
        });
    }

    /**
     * Texture-stitch phase: force-registers sprites that no blockstate references
     * directly, so they exist on the atlas when models look them up at runtime.
     *
     * @param event the pre-stitch event carrying the block texture map
     */
    @SubscribeEvent
    public void onTextureLoad(TextureStitchEvent.Pre event)
    {
        // Needed for models that are lazily accessed at runtime (ex: pipes covers)
        ROSOBJLoader.INSTANCE.getReTexturedMap().forEach((model, data) ->
                data.getReplacedTextures().forEach((matKey, texture) ->
                        event.getMap().registerSprite(new ResourceLocation(texture))));

        event.getMap().registerSprite(new ResourceLocation("ros:blocks/steamvent_medium"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/steamvalve_medium"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/fluidgauge"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/steampressurevalve_small"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/steampressurevalve_medium"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/steampressurevalve_large"));
        event.getMap().registerSprite(new ResourceLocation("ros:blocks/fluidpump_small"));
    }

    /**
     * Intercepts right-clicks in container GUIs: right-clicking a multiblock box
     * with an empty cursor sends an open packet to the server instead of picking
     * the stack up.
     *
     * @param event the GUI mouse input event; cancelled when the packet is sent
     */
    @SubscribeEvent
    public void onRightClick(GuiScreenEvent.MouseInputEvent event)
    {
        if (event.getGui() instanceof GuiContainer && Mouse.isButtonDown(1))
        {
            GuiContainer container = (GuiContainer) event.getGui();

            if (container.getSlotUnderMouse() != null
                    && Minecraft.getMinecraft().player.inventory.getItemStack().isEmpty()
                    && container.getSlotUnderMouse().getStack().getItem() == ROSItems.MULTIBLOCK_BOX)
            {
                new MultiblockBoxPacket(container.getSlotUnderMouse().slotNumber).sendToServer();
                event.setCanceled(true);
            }
        }
    }
}
package org.docksidestage.hangar.dbflute.cbean.cq.bs;

import java.util.Map;

import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.exception.IllegalConditionBeanOperationException;
import org.docksidestage.hangar.dbflute.cbean.cq.ciq.*;
import org.docksidestage.hangar.dbflute.cbean.*;
import org.docksidestage.hangar.dbflute.cbean.cq.*;

/**
 * The base condition-query of WHITE_BASE_ONE05_DSTORE.
 * <p>
 * NOTE(review): this class is auto-generated by DBFlute — do not hand-edit;
 * regenerate from the table metadata instead. The {@code xdfget*}/{@code keep*}
 * methods are internal framework hooks invoked by generated sub-query code.
 * @author DBFlute(AutoGenerator)
 */
public class BsWhiteBaseOne05DstoreCQ extends AbstractBsWhiteBaseOne05DstoreCQ {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // lazily-created in-line (sub-select) query, shared by inline() and on()
    protected WhiteBaseOne05DstoreCIQ _inlineQuery;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public BsWhiteBaseOne05DstoreCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    //                                                                 InlineView/OrClause
    //                                                                 ===================
    /**
     * Prepare InlineView query. <br>
     * {select ... from ... left outer join (select * from WHITE_BASE_ONE05_DSTORE) where FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">inline()</span>.setFoo...;
     * </pre>
     * @return The condition-query for InlineView query. (NotNull)
     */
    public WhiteBaseOne05DstoreCIQ inline() {
        if (_inlineQuery == null) { _inlineQuery = xcreateCIQ(); }
        // inline() always resets to InlineView mode; on() flips the same instance back
        _inlineQuery.xsetOnClause(false); return _inlineQuery;
    }

    // creates the in-line query and ties it to this query's base condition-bean
    protected WhiteBaseOne05DstoreCIQ xcreateCIQ() {
        WhiteBaseOne05DstoreCIQ ciq = xnewCIQ();
        ciq.xsetBaseCB(_baseCB);
        return ciq;
    }

    protected WhiteBaseOne05DstoreCIQ xnewCIQ() {
        return new WhiteBaseOne05DstoreCIQ(xgetReferrerQuery(), xgetSqlClause(), xgetAliasName(), xgetNestLevel(), this);
    }

    /**
     * Prepare OnClause query. <br>
     * {select ... from ... left outer join WHITE_BASE_ONE05_DSTORE on ... and FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">on()</span>.setFoo...;
     * </pre>
     * @return The condition-query for OnClause query. (NotNull)
     * @throws IllegalConditionBeanOperationException When this condition-query is base query.
     */
    public WhiteBaseOne05DstoreCIQ on() {
        if (isBaseQuery()) { throw new IllegalConditionBeanOperationException("OnClause for local table is unavailable!"); }
        WhiteBaseOne05DstoreCIQ inlineQuery = inline(); inlineQuery.xsetOnClause(true); return inlineQuery;
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    // condition value holder for the DSTORE_ID column (PK, NotNull, INTEGER(10))
    protected ConditionValue _dstoreId;
    public ConditionValue xdfgetDstoreId() {
        if (_dstoreId == null) { _dstoreId = nCV(); }
        return _dstoreId;
    }
    protected ConditionValue xgetCValueDstoreId() { return xdfgetDstoreId(); }

    // sub-query keep/get hooks for (Not)ExistsReferrer and DerivedReferrer on DSTORE_ID
    public Map<String, WhiteBaseCQ> xdfgetDstoreId_ExistsReferrer_WhiteBaseList() { return xgetSQueMap("dstoreId_ExistsReferrer_WhiteBaseList"); }
    public String keepDstoreId_ExistsReferrer_WhiteBaseList(WhiteBaseCQ sq) { return xkeepSQue("dstoreId_ExistsReferrer_WhiteBaseList", sq); }

    public Map<String, WhiteBaseCQ> xdfgetDstoreId_NotExistsReferrer_WhiteBaseList() { return xgetSQueMap("dstoreId_NotExistsReferrer_WhiteBaseList"); }
    public String keepDstoreId_NotExistsReferrer_WhiteBaseList(WhiteBaseCQ sq) { return xkeepSQue("dstoreId_NotExistsReferrer_WhiteBaseList", sq); }

    public Map<String, WhiteBaseCQ> xdfgetDstoreId_SpecifyDerivedReferrer_WhiteBaseList() { return xgetSQueMap("dstoreId_SpecifyDerivedReferrer_WhiteBaseList"); }
    public String keepDstoreId_SpecifyDerivedReferrer_WhiteBaseList(WhiteBaseCQ sq) { return xkeepSQue("dstoreId_SpecifyDerivedReferrer_WhiteBaseList", sq); }

    public Map<String, WhiteBaseCQ> xdfgetDstoreId_QueryDerivedReferrer_WhiteBaseList() { return xgetSQueMap("dstoreId_QueryDerivedReferrer_WhiteBaseList"); }
    public String keepDstoreId_QueryDerivedReferrer_WhiteBaseList(WhiteBaseCQ sq) { return xkeepSQue("dstoreId_QueryDerivedReferrer_WhiteBaseList", sq); }
    public Map<String, Object> xdfgetDstoreId_QueryDerivedReferrer_WhiteBaseListParameter() { return xgetSQuePmMap("dstoreId_QueryDerivedReferrer_WhiteBaseList"); }
    public String keepDstoreId_QueryDerivedReferrer_WhiteBaseListParameter(Object pm) { return xkeepSQuePm("dstoreId_QueryDerivedReferrer_WhiteBaseList", pm); }

    /**
     * Add order-by as ascend. <br>
     * DSTORE_ID: {PK, NotNull, INTEGER(10)}
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addOrderBy_DstoreId_Asc() { regOBA("DSTORE_ID"); return this; }

    /**
     * Add order-by as descend. <br>
     * DSTORE_ID: {PK, NotNull, INTEGER(10)}
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addOrderBy_DstoreId_Desc() { regOBD("DSTORE_ID"); return this; }

    // condition value holder for the DSTORE_NAME column (NotNull, VARCHAR(200))
    protected ConditionValue _dstoreName;
    public ConditionValue xdfgetDstoreName() {
        if (_dstoreName == null) { _dstoreName = nCV(); }
        return _dstoreName;
    }
    protected ConditionValue xgetCValueDstoreName() { return xdfgetDstoreName(); }

    /**
     * Add order-by as ascend. <br>
     * DSTORE_NAME: {NotNull, VARCHAR(200)}
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addOrderBy_DstoreName_Asc() { regOBA("DSTORE_NAME"); return this; }

    /**
     * Add order-by as descend. <br>
     * DSTORE_NAME: {NotNull, VARCHAR(200)}
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addOrderBy_DstoreName_Desc() { regOBD("DSTORE_NAME"); return this; }

    // ===================================================================================
    //                                                             SpecifiedDerivedOrderBy
    //                                                             =======================
    /**
     * Add order-by for specified derived column as ascend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] asc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Asc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addSpecifiedDerivedOrderBy_Asc(String aliasName) { registerSpecifiedDerivedOrderBy_Asc(aliasName); return this; }

    /**
     * Add order-by for specified derived column as descend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] desc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Desc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsWhiteBaseOne05DstoreCQ addSpecifiedDerivedOrderBy_Desc(String aliasName) { registerSpecifiedDerivedOrderBy_Desc(aliasName); return this; }

    // ===================================================================================
    //                                                                         Union Query
    //                                                                         ===========
    // no relation columns to reflect for this table; intentionally empty
    public void reflectRelationOnUnionQuery(ConditionQuery bqs, ConditionQuery uqs) {
    }

    // ===================================================================================
    //                                                                       Foreign Query
    //                                                                       =============
    // no fixed-condition relations generated for this table, so no dynamic parameters
    protected Map<String, Object> xfindFixedConditionDynamicParameterMap(String property) { return null; }

    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    public Map<String, WhiteBaseOne05DstoreCQ> xdfgetScalarCondition() { return xgetSQueMap("scalarCondition"); }
    public String keepScalarCondition(WhiteBaseOne05DstoreCQ sq) { return xkeepSQue("scalarCondition", sq); }

    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    public Map<String, WhiteBaseOne05DstoreCQ> xdfgetSpecifyMyselfDerived() { return xgetSQueMap("specifyMyselfDerived"); }
    public String keepSpecifyMyselfDerived(WhiteBaseOne05DstoreCQ sq) { return xkeepSQue("specifyMyselfDerived", sq); }
    public Map<String, WhiteBaseOne05DstoreCQ> xdfgetQueryMyselfDerived() { return xgetSQueMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerived(WhiteBaseOne05DstoreCQ sq) { return xkeepSQue("queryMyselfDerived", sq); }
    public Map<String, Object> xdfgetQueryMyselfDerivedParameter() { return xgetSQuePmMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerivedParameter(Object pm) { return xkeepSQuePm("queryMyselfDerived", pm); }

    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    // NOTE(review): field appears unused (getter delegates to xgetSQueMap) — generator artifact
    protected Map<String, WhiteBaseOne05DstoreCQ> _myselfExistsMap;
    public Map<String, WhiteBaseOne05DstoreCQ> xdfgetMyselfExists() { return xgetSQueMap("myselfExists"); }
    public String keepMyselfExists(WhiteBaseOne05DstoreCQ sq) { return xkeepSQue("myselfExists", sq); }

    // ===================================================================================
    //                                                                       MyselfInScope
    //                                                                       =============
    public Map<String, WhiteBaseOne05DstoreCQ> xdfgetMyselfInScope() { return xgetSQueMap("myselfInScope"); }
    public String keepMyselfInScope(WhiteBaseOne05DstoreCQ sq) { return xkeepSQue("myselfInScope", sq); }

    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xCB() { return WhiteBaseOne05DstoreCB.class.getName(); }
    protected String xCQ() { return WhiteBaseOne05DstoreCQ.class.getName(); }
    protected String xCHp() { return HpQDRFunction.class.getName(); }
    protected String xCOp() { return ConditionOption.class.getName(); }
    protected String xMap() { return Map.class.getName(); }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.ocs.dynamo.ui.composite.grid; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; import com.ocs.dynamo.dao.FetchJoinInformation; import com.ocs.dynamo.domain.AbstractEntity; import com.ocs.dynamo.domain.model.EntityModel; import com.ocs.dynamo.service.BaseService; import com.ocs.dynamo.ui.composite.export.PivotParameters; import com.ocs.dynamo.ui.composite.layout.FormOptions; import com.ocs.dynamo.ui.provider.BaseDataProvider; import com.ocs.dynamo.ui.provider.IdBasedDataProvider; import com.ocs.dynamo.ui.provider.PagingDataProvider; import com.ocs.dynamo.ui.provider.PivotDataProvider; import com.ocs.dynamo.ui.provider.PivotedItem; import com.ocs.dynamo.ui.provider.QueryType; import com.ocs.dynamo.ui.utils.VaadinUtils; import com.vaadin.flow.component.button.Button; import com.vaadin.flow.component.grid.Grid.SelectionMode; import com.vaadin.flow.component.grid.GridSortOrder; import com.vaadin.flow.component.grid.contextmenu.GridContextMenu; import com.vaadin.flow.component.html.Span; import com.vaadin.flow.component.orderedlayout.VerticalLayout; import com.vaadin.flow.data.provider.DataProvider; import com.vaadin.flow.data.provider.SortDirection; import com.vaadin.flow.data.provider.SortOrder; import com.vaadin.flow.function.SerializablePredicate; /** * Wrapper around a pivot grid * * @author Bas Rutten 
 *
 * @param <ID> the type of the primary key of the entity
 * @param <T>  the type of the entity
 */
public class PivotGridWrapper<ID extends Serializable, T extends AbstractEntity<ID>>
        extends GridWrapper<ID, T, PivotedItem> {

    private static final long serialVersionUID = -4691108261565306844L;

    /**
     * The label that displays the table caption
     */
    private Span caption = new Span("");

    /**
     * The name of the property that contains the values that lead to the pivoted
     * columns
     */
    private String columnKeyProperty;

    /**
     * The data provider
     */
    private PivotDataProvider<ID, T> dataProvider;

    /**
     * The names of the fixed/frozen columns
     */
    private List<String> fixedColumnKeys;

    /**
     * Function for mapping for fixed property name to grid header
     */
    private Function<String, String> fixedHeaderMapper = Function.identity();

    /**
     * The wrapped grid component
     */
    private PivotGrid<ID, T> grid;

    /**
     * Bifunction used to map pivot column headers; the default ignores the second
     * argument and uses the first argument's string form
     */
    private BiFunction<Object, Object, String> headerMapper = (a, b) -> a.toString();

    /**
     * The layout that contains the grid
     */
    private VerticalLayout layout;

    /**
     * The properties to display in the pivoted columns
     */
    private List<String> pivotedProperties;

    /**
     * The possible values of the columnPropertyKey property.
     */
    private List<Object> possibleColumnKeys;

    /**
     * The property that is checked to determine whether a new row is reached
     */
    private String rowKeyProperty;

    /**
     * Supplier that is used to determine the number of rows in the pivot table
     */
    private Supplier<Integer> sizeSupplier;

    /**
     * The wrapped data provider
     */
    private BaseDataProvider<ID, T> wrappedProvider;

    /**
     * Constructor
     *
     * @param service     the service that is used for retrieving data
     * @param entityModel the entity model
     * @param queryType   the query type to use
     * @param formOptions the form options
     * @param filter      the initial search filter
     * @param sortOrders  the default sort orders
     * @param joins       optional list of fetch joins to include in the query
     */
    public PivotGridWrapper(BaseService<ID, T> service, EntityModel<T> entityModel, QueryType queryType,
            FormOptions formOptions, SerializablePredicate<T> filter, List<SortOrder<?>> sortOrders,
            FetchJoinInformation... joins) {
        super(service, entityModel, queryType, formOptions, filter, sortOrders, joins);
    }

    /**
     * Builds the component.
     */
    @Override
    public void build() {
        layout = new VerticalLayout();
        layout.add(caption);

        // the data provider must exist before the grid is (lazily) constructed,
        // since constructGrid passes it to the grid constructor
        this.dataProvider = constructDataProvider();
        grid = getGrid();
        layout.add(grid);

        initSortingAndFiltering();

        grid.setSelectionMode(SelectionMode.SINGLE);
        add(layout);
    }

    /**
     * Constructs the pivot data provider around a paging or id-based wrapped
     * provider, depending on the configured query type.
     *
     * @return the newly constructed (and post-processed) pivot data provider
     */
    protected PivotDataProvider<ID, T> constructDataProvider() {
        if (QueryType.PAGING.equals(getQueryType())) {
            wrappedProvider = new PagingDataProvider<>(getService(), getEntityModel(),
                    getFormOptions().isShowNextButton() || getFormOptions().isShowPrevButton(), getJoins());
        } else {
            wrappedProvider = new IdBasedDataProvider<>(getService(), getEntityModel(), getJoins());
        }
        PivotDataProvider<ID, T> pivotDataProvider = new PivotDataProvider<>(wrappedProvider, rowKeyProperty,
                columnKeyProperty, fixedColumnKeys, pivotedProperties, sizeSupplier);
        // refresh the caption once the row count is known
        pivotDataProvider.setAfterCountCompleted(x -> updateCaption(x));
        postProcessDataProvider(pivotDataProvider);
        return pivotDataProvider;
    }

    /**
     * Constructs the grid - override in subclasses if you need a different grid
     * implementation
     *
     * @return the newly constructed grid
     */
    protected PivotGrid<ID, T> constructGrid() {
        return new PivotGrid<>(dataProvider, possibleColumnKeys, fixedHeaderMapper, headerMapper);
    }

    public String getColumnKeyProperty() {
        return columnKeyProperty;
    }

    public DataProvider<PivotedItem, SerializablePredicate<PivotedItem>> getDataProvider() {
        return dataProvider;
    }

    @Override
    public int getDataProviderSize() {
        return dataProvider.getSize();
    }

    public List<String> getFixedColumnKeys() {
        return fixedColumnKeys;
    }

    public Function<String, String> getFixedHeaderMapper() {
        return fixedHeaderMapper;
    }

    /**
     * Lazily construct and return the grid
     *
     * @return the grid (constructed on first call)
     */
    public PivotGrid<ID, T> getGrid() {
        if (grid == null) {
            grid = constructGrid();
        }
        return grid;
    }

    public BiFunction<Object, Object, String> getHeaderMapper() {
        return headerMapper;
    }

    public List<String> getPivotedProperties() {
        return pivotedProperties;
    }

    public List<Object> getPossibleColumnKeys() {
        return possibleColumnKeys;
    }

    public String getRowKeyProperty() {
        return rowKeyProperty;
    }

    public Supplier<Integer> getSizeSupplier() {
        return sizeSupplier;
    }

    /**
     * Extracts the sort directions from the sort orders
     *
     * @return one boolean per sort order; true means ascending
     */
    protected boolean[] getSortDirections() {
        boolean[] result = new boolean[getSortOrders().size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = SortDirection.ASCENDING == getSortOrders().get(i).getDirection();
        }
        return result;
    }

    /**
     * Initializes the sorting and filtering for the grid; also wires up the
     * export context menu when exporting is allowed.
     *
     * @return the fall-back sort orders computed by the superclass
     */
    @Override
    @SuppressWarnings({ "unchecked", "rawtypes" })
    protected List<SortOrder<?>> initSortingAndFiltering() {

        // pass along initial filter
        getGrid().getDataCommunicator().setDataProvider(getDataProvider(), (SerializablePredicate) getFilter());

        List<SortOrder<?>> fallbackOrders = super.initSortingAndFiltering();

        // set fall back sort orders
        if (wrappedProvider instanceof BaseDataProvider) {
            ((BaseDataProvider<ID, T>) wrappedProvider).setFallBackSortOrders(fallbackOrders);
        }

        if (getFormOptions().isExportAllowed() && getExportDelegate() != null) {
            GridContextMenu<PivotedItem> contextMenu = getGrid().addContextMenu();
            Button downloadButton = new Button(message("ocs.download"));
            downloadButton.addClickListener(event -> {
                // NOTE(review): 'orders' is built from the grid's current sort order but is
                // never passed to exportPivoted below (which deliberately uses the fallback
                // orders) — confirm whether this list is dead code or should be used.
                List<SortOrder<?>> orders = new ArrayList<>();
                List<GridSortOrder<PivotedItem>> so = getGrid().getSortOrder();
                for (GridSortOrder<PivotedItem> gso : so) {
                    orders.add(new SortOrder<String>(gso.getSorted().getKey(), gso.getDirection()));
                }

                PivotParameters pars = new PivotParameters();
                pars.setColumnKeyProperty(columnKeyProperty);
                pars.setFixedColumnKeys(fixedColumnKeys);
                pars.setHeaderMapper(headerMapper);
                pars.setFixedHeaderMapper(fixedHeaderMapper);
                pars.setPivotedProperties(pivotedProperties);
                pars.setPossibleColumnKeys(possibleColumnKeys);
                pars.setRowKeyProperty(rowKeyProperty);

                // use the fallback sort orders here
                getExportDelegate().exportPivoted(
                        getExportEntityModel() != null ? getExportEntityModel() : getEntityModel(), getFilter(),
                        fallbackOrders, pars, getExportJoins() != null ? getExportJoins() : getJoins());
            });
            contextMenu.add(downloadButton);
        }
        return fallbackOrders;
    }

    /**
     * Respond to a selection of an item in the grid
     *
     * @param selected the selected item
     */
    protected void onSelect(Object selected) {
        // overwrite in subclasses
    }

    /**
     * Callback method used to modify data provider creation
     *
     * @param provider the provider to modify
     */
    protected void postProcessDataProvider(PivotDataProvider<ID, T> provider) {
        // overwrite in subclasses
    }

    @Override
    public void reloadDataProvider() {
        // not needed
    }

    public void setColumnKeyProperty(String columnKeyProperty) {
        this.columnKeyProperty = columnKeyProperty;
    }

    public void setFixedColumnKeys(List<String> fixedColumnKeys) {
        this.fixedColumnKeys = fixedColumnKeys;
    }

    public void setFixedHeaderMapper(Function<String, String> fixedHeaderMapper) {
        this.fixedHeaderMapper = fixedHeaderMapper;
    }

    public void setHeaderMapper(BiFunction<Object, Object, String> headerMapper) {
        this.headerMapper = headerMapper;
    }

    public void setPivotedProperties(List<String> pivotedProperties) {
        this.pivotedProperties = pivotedProperties;
    }

    public void setPossibleColumnKeys(List<Object> possibleColumnKeys) {
        this.possibleColumnKeys = possibleColumnKeys;
    }

    public void setRowKeyProperty(String rowKeyProperty) {
        this.rowKeyProperty = rowKeyProperty;
    }

    public void setSizeSupplier(Supplier<Integer> sizeSupplier) {
        this.sizeSupplier = sizeSupplier;
    }

    /**
     * Updates the caption above the grid that shows the number of items
     *
     * @param size the number of items
     */
    protected void updateCaption(int size) {
        caption.setText(getEntityModel().getDisplayNamePlural(VaadinUtils.getLocale()) + " "
                + getMessageService().getMessage("ocs.showing.results", VaadinUtils.getLocale(), size));
    }
}