gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.carbondata.processing.store;

import java.io.File;
import java.io.FileFilter;
import java.util.AbstractQueue;
import java.util.ArrayList;
import java.util.List;
import java.util.PriorityQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sortandgroupby.sortdata.SortTempFileChunkHolder;
import org.apache.carbondata.processing.store.writer.exception.CarbonDataWriterException;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;

/**
 * Performs the final k-way merge over the sorted temp files produced by the
 * intermediate sort step. Each temp file is wrapped in a
 * {@link SortTempFileChunkHolder} and placed in a priority queue; {@link #next()}
 * repeatedly polls the smallest row off the heap.
 */
public class SingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
  /**
   * LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(SingleThreadFinalSortFilesMerger.class.getName());

  /**
   * lock guarding concurrent additions to the record holder heap while the
   * reader threads initialize each chunk holder
   */
  private static final Object LOCKOBJECT = new Object();

  /**
   * number of temp files still holding unread rows; drives {@link #hasNext()}
   */
  private int fileCounter;

  /**
   * read buffer size for each temp file stream
   */
  private int fileBufferSize;

  /**
   * min-heap of chunk holders ordered by their current head row
   */
  private AbstractQueue<SortTempFileChunkHolder> recordHolderHeapLocal;

  /**
   * tableName
   */
  private String tableName;

  /**
   * measureCount
   */
  private int measureCount;

  /**
   * dimensionCount
   */
  private int dimensionCount;

  /**
   * number of no-dictionary columns
   */
  private int noDictionaryCount;

  /**
   * complexDimensionCount
   */
  private int complexDimensionCount;

  /**
   * directory holding the sorted temp files to merge
   */
  private String tempFileLocation;

  private char[] aggType;

  /**
   * below code is to check whether dimension
   * is of no dictionary type or not
   */
  private boolean[] isNoDictionaryColumn;

  public SingleThreadFinalSortFilesMerger(String tempFileLocation, String tableName,
      int dimensionCount, int complexDimensionCount, int measureCount, int noDictionaryCount,
      char[] aggType, boolean[] isNoDictionaryColumn) {
    this.tempFileLocation = tempFileLocation;
    this.tableName = tableName;
    this.dimensionCount = dimensionCount;
    this.complexDimensionCount = complexDimensionCount;
    this.measureCount = measureCount;
    this.aggType = aggType;
    this.noDictionaryCount = noDictionaryCount;
    this.isNoDictionaryColumn = isNoDictionaryColumn;
  }

  /**
   * This method will be used to merge the merged files
   *
   * @throws CarbonDataWriterException if any temp file cannot be read
   */
  public void startFinalMerge() throws CarbonDataWriterException {
    // get all the merged files
    File file = new File(tempFileLocation);
    File[] fileList = file.listFiles(new FileFilter() {
      public boolean accept(File pathname) {
        return pathname.getName().startsWith(tableName);
      }
    });
    // BUG FIX: was "fileList.length < 0", which is always false, so an empty
    // directory was not short-circuited and startSorting ran over zero files.
    if (null == fileList || fileList.length == 0) {
      return;
    }
    startSorting(fileList);
  }

  /**
   * Below method will be used to start storing process This method will get
   * all the temp files present in sort temp folder then it will create the
   * record holder heap and then it will read first record from each file and
   * initialize the heap
   *
   * @throws CarbonDataWriterException if reading or initializing any temp file fails
   */
  private void startSorting(File[] files) throws CarbonDataWriterException {
    this.fileCounter = files.length;
    this.fileBufferSize = CarbonDataProcessorUtil
        .getFileBufferSize(this.fileCounter, CarbonProperties.getInstance(),
            CarbonCommonConstants.CONSTANT_SIZE_TEN);
    LOGGER.info("Number of temp file: " + this.fileCounter);
    LOGGER.info("File Buffer Size: " + this.fileBufferSize);
    // create record holder heap
    createRecordHolderQueue(files);
    // iterate over file list and create chunk holder and add to heap
    LOGGER.info("Started adding first record from each file");
    int maxThreadForSorting = 0;
    try {
      maxThreadForSorting = Integer.parseInt(CarbonProperties.getInstance()
          .getProperty(CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD,
              CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD_DEFAULTVALUE));
    } catch (NumberFormatException e) {
      // fall back to the documented default when the property is malformed
      maxThreadForSorting =
          Integer.parseInt(CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD_DEFAULTVALUE);
    }
    ExecutorService service = Executors.newFixedThreadPool(maxThreadForSorting);
    List<Future<Void>> initFutures = new ArrayList<Future<Void>>(files.length);
    for (final File tempFile : files) {
      Callable<Void> runnable = new Callable<Void>() {
        @Override public Void call() throws CarbonSortKeyAndGroupByException {
          // create chunk holder
          SortTempFileChunkHolder sortTempFileChunkHolder =
              new SortTempFileChunkHolder(tempFile, dimensionCount, complexDimensionCount,
                  measureCount, fileBufferSize, noDictionaryCount, aggType, isNoDictionaryColumn);
          // initialize
          sortTempFileChunkHolder.initialize();
          sortTempFileChunkHolder.readRow();
          // add to heap; PriorityQueue is not thread-safe, hence the lock
          synchronized (LOCKOBJECT) {
            recordHolderHeapLocal.add(sortTempFileChunkHolder);
          }
          return null;
        }
      };
      initFutures.add(service.submit(runnable));
    }
    service.shutdown();
    try {
      service.awaitTermination(2, TimeUnit.HOURS);
      // BUG FIX: the submitted futures were previously never inspected, so an
      // exception while reading a temp file was silently swallowed and the
      // corresponding rows were dropped from the merge. Propagate it instead.
      for (Future<Void> initFuture : initFutures) {
        initFuture.get();
      }
    } catch (Exception e) {
      throw new CarbonDataWriterException(e.getMessage(), e);
    }
    LOGGER.info("Heap Size" + this.recordHolderHeapLocal.size());
  }

  /**
   * This method will be used to create the heap which will be used to hold
   * the chunk of data
   *
   * @param listFiles list of temp files
   */
  private void createRecordHolderQueue(File[] listFiles) {
    // creating record holder heap
    this.recordHolderHeapLocal = new PriorityQueue<SortTempFileChunkHolder>(listFiles.length);
  }

  /**
   * This method will be used to get the sorted row
   *
   * @return sorted row
   */
  public Object[] next() {
    return getSortedRecordFromFile();
  }

  /**
   * This method will be used to get the sorted record from file
   *
   * @return sorted record sorted record
   */
  private Object[] getSortedRecordFromFile() throws CarbonDataWriterException {
    Object[] row = null;
    // poll the top object from heap
    // heap maintains binary tree which is based on heap condition that will
    // be based on comparator we are passing the heap
    // when will call poll it will always delete root of the tree and then
    // it does trickle down operation complexity is log(n)
    SortTempFileChunkHolder poll = this.recordHolderHeapLocal.poll();
    // get the row from chunk
    row = poll.getRow();
    // check if there no entry present
    if (!poll.hasNext()) {
      // if chunk is empty then close the stream
      poll.closeStream();
      // change the file counter
      --this.fileCounter;
      // return row
      return row;
    }
    // read new row
    try {
      poll.readRow();
    } catch (CarbonSortKeyAndGroupByException e) {
      throw new CarbonDataWriterException(e.getMessage(), e);
    }
    // add to heap
    this.recordHolderHeapLocal.add(poll);
    // return row
    return row;
  }

  /**
   * This method will be used to check whether any more element is present or
   * not
   *
   * @return more element is present
   */
  public boolean hasNext() {
    return this.fileCounter > 0;
  }

  public void clear() {
    if (null != recordHolderHeapLocal) {
      recordHolderHeapLocal = null;
    }
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.editor.impl.event.DocumentEventImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.BinaryFileTypeDecompilers;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.util.ProperTextRange;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UnfairTextRange;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.DocumentUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.diff.FilesTooBigForDiffException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * A marker over a text range that survives document edits: on every
 * {@link DocumentEvent} the stored interval is shifted/expanded/invalidated
 * according to where the change fell relative to the range (see
 * {@link #applyChange}). The actual interval endpoints live in the tree node
 * ({@code myNode}), not in this object.
 */
public class RangeMarkerImpl extends UserDataHolderBase implements RangeMarkerEx {
  private static final Logger LOG = Logger.getInstance(RangeMarkerImpl.class);

  @NotNull
  private final Object myDocumentOrFile; // either VirtualFile (if any) or DocumentEx if no file associated
  RangeMarkerTree.RMNode<RangeMarkerEx> myNode;

  private final long myId;
  private static final StripedIDGenerator counter = new StripedIDGenerator();

  RangeMarkerImpl(@NotNull DocumentEx document, int start, int end, boolean register, boolean forceDocumentStrongReference) {
    // When not forcing a strong document reference, prefer storing the VirtualFile
    // (if the document has one) so the document itself can be collected.
    this(forceDocumentStrongReference ? document
                                      : ObjectUtils.notNull(FileDocumentManager.getInstance().getFile(document), document),
         document.getTextLength(), start, end, register, false, false);
  }

  // constructor which creates marker without document and saves it in the virtual file directly. Can be cheaper than loading document.
  RangeMarkerImpl(@NotNull VirtualFile virtualFile, int start, int end, boolean register) {
    // unfortunately we don't know the exact document size until we load it
    this(virtualFile, estimateDocumentLength(virtualFile), start, end, register, false, false);
  }

  // Uses the cached document's length if one is loaded; otherwise MAX_VALUE so the
  // range-vs-document-length validation in the private constructor cannot reject.
  private static int estimateDocumentLength(@NotNull VirtualFile virtualFile) {
    Document document = FileDocumentManager.getInstance().getCachedDocument(virtualFile);
    return document == null ? Integer.MAX_VALUE : document.getTextLength();
  }

  private RangeMarkerImpl(@NotNull Object documentOrFile,
                          int documentTextLength,
                          int start,
                          int end,
                          boolean register,
                          boolean greedyToLeft,
                          boolean greedyToRight) {
    // Validate 0 <= start <= end <= documentTextLength before storing anything.
    if (start < 0) {
      throw new IllegalArgumentException("Wrong start: " + start+"; end="+end);
    }
    if (end > documentTextLength) {
      throw new IllegalArgumentException("Wrong end: " + end + "; document length=" + documentTextLength + "; start=" + start);
    }
    if (start > end){
      throw new IllegalArgumentException("start > end: start=" + start+"; end="+end);
    }

    myDocumentOrFile = documentOrFile;
    myId = counter.next();
    if (register) {
      registerInTree(start, end, greedyToLeft, greedyToRight, 0);
    }
  }

  // Inserts this marker into the document's interval tree; overridable so
  // subclasses can register with a different layer.
  protected void registerInTree(int start, int end, boolean greedyToLeft, boolean greedyToRight, int layer) {
    getDocument().registerRangeMarker(this, start, end, greedyToLeft, greedyToRight, layer);
  }

  protected void unregisterInTree() {
    if (!isValid()) return;
    IntervalTreeImpl<?> tree = myNode.getTree();
    tree.checkMax(true);
    DocumentEx document = getCachedDocument();
    if (document == null) {
      // document not loaded: nothing to remove from; just drop the node reference
      myNode = null;
    }
    else {
      document.removeRangeMarker(this);
    }
    tree.checkMax(true);
  }

  @Override
  public long getId() {
    return myId;
  }

  @Override
  public void dispose() {
    unregisterInTree();
  }

  @Override
  public int getStartOffset() {
    RangeMarkerTree.RMNode<?> node = myNode;
    // intervalStart is relative; add the accumulated delta from the tree root
    return node == null ? -1 : node.intervalStart() + node.computeDeltaUpToRoot();
  }

  @Override
  public int getEndOffset() {
    RangeMarkerTree.RMNode<?> node = myNode;
    return node == null ? -1 : node.intervalEnd() + node.computeDeltaUpToRoot();
  }

  // Marks this marker (and, via the node, all markers sharing the node's interval)
  // as invalid, notifying the tree before each removal.
  void invalidate(@NotNull final Object reason) {
    setValid(false);
    RangeMarkerTree.RMNode<?> node = myNode;

    if (node != null) {
      node.processAliveKeys(markerEx -> {
        myNode.getTree().beforeRemove(markerEx, reason);
        return true;
      });
    }
  }

  @Override
  @NotNull
  public final DocumentEx getDocument() {
    Object file = myDocumentOrFile;
    DocumentEx document =
      file instanceof VirtualFile ? (DocumentEx)FileDocumentManager.getInstance().getDocument((VirtualFile)file) : (DocumentEx)file;
    if (document == null) {
      // NOTE(review): declared @NotNull yet may return null after logging —
      // callers appear to rely on this never happening for valid markers.
      LOG.error("document is null; isValid=" + isValid()+"; file="+file);
    }
    return document;
  }

  // Like getDocument() but never forces loading the document from the file.
  DocumentEx getCachedDocument() {
    Object file = myDocumentOrFile;
    return file instanceof VirtualFile ? (DocumentEx)FileDocumentManager.getInstance().getCachedDocument((VirtualFile)file) : (DocumentEx)file;
  }

  // fake method to simplify setGreedyToLeft/right methods. overridden in RangeHighlighter
  public int getLayer() {
    return 0;
  }

  @Override
  public void setGreedyToLeft(final boolean greedy) {
    if (!isValid() || greedy == isGreedyToLeft()) return;

    myNode.getTree().changeData(this, getStartOffset(), getEndOffset(), greedy, isGreedyToRight(), isStickingToRight(), getLayer());
  }

  @Override
  public void setGreedyToRight(final boolean greedy) {
    if (!isValid() || greedy == isGreedyToRight()) return;

    myNode.getTree().changeData(this, getStartOffset(), getEndOffset(), isGreedyToLeft(), greedy, isStickingToRight(), getLayer());
  }

  public void setStickingToRight(boolean value) {
    if (!isValid() || value == isStickingToRight()) return;

    myNode.getTree().changeData(this, getStartOffset(), getEndOffset(), isGreedyToLeft(), isGreedyToRight(), value, getLayer());
  }

  @Override
  public boolean isGreedyToLeft() {
    RangeMarkerTree.RMNode<?> node = myNode;
    return node != null && node.isGreedyToLeft();
  }

  @Override
  public boolean isGreedyToRight() {
    RangeMarkerTree.RMNode<?> node = myNode;
    return node != null && node.isGreedyToRight();
  }

  public boolean isStickingToRight() {
    RangeMarkerTree.RMNode<?> node = myNode;
    return node != null && node.isStickingToRight();
  }

  @Override
  public final void documentChanged(@NotNull DocumentEvent e) {
    int oldStart = intervalStart();
    int oldEnd = intervalEnd();
    int docLength = e.getDocument().getTextLength();
    // Sanity-check the interval before and after the update; on any
    // inconsistency, log and invalidate rather than keep a corrupt range.
    if (!isValid()) {
      LOG.error("Invalid range marker "+ (isGreedyToLeft() ? "[" : "(") + oldStart + ", " + oldEnd + (isGreedyToRight() ? "]" : ")") +
                ". Event = " + e + ". Doc length=" + docLength + "; "+getClass());
      return;
    }
    if (oldStart > oldEnd || oldStart < 0 || oldEnd > docLength - e.getNewLength() + e.getOldLength()) {
      LOG.error("RangeMarker" + (isGreedyToLeft() ? "[" : "(") + oldStart + ", " + oldEnd + (isGreedyToRight() ? "]" : ")") +
                " is invalid before update. Event = " + e + ". Doc length=" + docLength + "; "+getClass());
      invalidate(e);
      return;
    }
    changedUpdateImpl(e);
    int newStart;
    int newEnd;
    if (isValid() && ((newStart=intervalStart()) > (newEnd=intervalEnd()) || newStart < 0 || newEnd > docLength)) {
      LOG.error("Update failed. Event = " + e + ". " +
                "Doc length=" + docLength + "; "+getClass()+". Before update: " + (isGreedyToLeft() ? "[" : "(") + oldStart + ", " + oldEnd + (isGreedyToRight() ? "]" : ")") +
                " After update: '"+this+"'");
      invalidate(e);
    }
  }

  protected void changedUpdateImpl(@NotNull DocumentEvent e) {
    doChangeUpdate(e);
  }

  // Applies the event to this marker's interval; invalidates the marker if the
  // change destroyed the range (applyChange returned null).
  private void doChangeUpdate(@NotNull DocumentEvent e) {
    if (!isValid()) return;

    TextRange newRange = applyChange(e, intervalStart(), intervalEnd(), isGreedyToLeft(), isGreedyToRight(), isStickingToRight());
    if (newRange == null) {
      invalidate(e);
      return;
    }

    setIntervalStart(newRange.getStartOffset());
    setIntervalEnd(newRange.getEndOffset());
  }

  // Update strategy for line-oriented ("persistent") highlighters: try to keep
  // the marker on the same logical line by translating the line number through
  // the document diff; fall back to the plain offset update if the diff is
  // unavailable or the files are too big to diff.
  protected void persistentHighlighterUpdate(@NotNull DocumentEvent e, boolean wholeLineRange) {
    int line = 0;
    DocumentEventImpl event = (DocumentEventImpl)e;
    boolean viaDiff = isValid() && PersistentRangeMarkerUtil.shouldTranslateViaDiff(event, getStartOffset(), getEndOffset());
    if (viaDiff) {
      try {
        line = event.getLineNumberBeforeUpdate(getStartOffset());
        line = translatedViaDiff(event, line);
      }
      catch (FilesTooBigForDiffException exception) {
        viaDiff = false;
      }
    }
    if (!viaDiff) {
      doChangeUpdate(e);
      if (isValid()) {
        line = getDocument().getLineNumber(getStartOffset());
        int endLine = getDocument().getLineNumber(getEndOffset());
        if (endLine != line) {
          // clamp a range that now spans lines back to the start line
          setIntervalEnd(getDocument().getLineEndOffset(line));
        }
      }
    }
    if (isValid() && wholeLineRange) {
      setIntervalStart(DocumentUtil.getFirstNonSpaceCharOffset(getDocument(), line));
      setIntervalEnd(getDocument().getLineEndOffset(line));
    }
  }

  private int translatedViaDiff(@NotNull DocumentEventImpl e, int line) throws FilesTooBigForDiffException {
    line = e.translateLineViaDiff(line);
    if (line < 0 || line >= getDocument().getLineCount()) {
      // translated line no longer exists in the new document
      invalidate(e);
    }
    else {
      DocumentEx document = getDocument();
      setIntervalStart(document.getLineStartOffset(line));
      setIntervalEnd(document.getLineEndOffset(line));
    }
    return line;
  }

  // Called after the range was shifted from e.getMoveOffset() to e.getOffset()
  protected void onReTarget(@NotNull DocumentEvent e) {}

  /**
   * Computes the marker's new range after a document change, or {@code null}
   * if the change destroys the range. Cases, in order: change strictly after
   * the end; insertion exactly at a non-greedy end; change strictly before the
   * start; insertion exactly at a non-greedy start; change fully inside the
   * range; change overlapping only the start or only the end.
   */
  @Nullable
  static TextRange applyChange(@NotNull DocumentEvent e, int intervalStart, int intervalEnd,
                               boolean isGreedyToLeft, boolean isGreedyToRight, boolean isStickingToRight) {
    if (intervalStart == intervalEnd) {
      return processIfOnePoint(e, intervalStart, isGreedyToRight, isStickingToRight);
    }

    final int offset = e.getOffset();
    final int oldLength = e.getOldLength();
    final int newLength = e.getNewLength();

    // changes after the end.
    if (intervalEnd < offset) {
      return new UnfairTextRange(intervalStart, intervalEnd);
    }
    if (!isGreedyToRight && intervalEnd == offset) {
      // handle replaceString that was minimized and resulted in insertString at the range end
      if (e instanceof DocumentEventImpl && oldLength == 0 && ((DocumentEventImpl)e).getInitialStartOffset() < offset) {
        return new UnfairTextRange(intervalStart, intervalEnd + newLength);
      }
      return new UnfairTextRange(intervalStart, intervalEnd);
    }

    // changes before start
    if (intervalStart > offset + oldLength) {
      return new UnfairTextRange(intervalStart + newLength - oldLength, intervalEnd + newLength - oldLength);
    }
    if (!isGreedyToLeft && intervalStart == offset + oldLength) {
      // handle replaceString that was minimized and resulted in insertString at the range start
      if (e instanceof DocumentEventImpl && oldLength == 0 && ((DocumentEventImpl)e).getInitialStartOffset() + ((DocumentEventImpl)e).getInitialOldLength() > offset) {
        return new UnfairTextRange(intervalStart, intervalEnd + newLength);
      }
      return new UnfairTextRange(intervalStart + newLength - oldLength, intervalEnd + newLength - oldLength);
    }

    // Changes inside marker's area. Expand/collapse.
    if (intervalStart <= offset && intervalEnd >= offset + oldLength) {
      return new ProperTextRange(intervalStart, intervalEnd + newLength - oldLength);
    }

    // At this point we either have (myStart xor myEnd inside changed area) or whole area changed.

    // Replacing prefix or suffix...
    if (intervalStart >= offset && intervalStart <= offset + oldLength && intervalEnd > offset + oldLength) {
      return new ProperTextRange(offset + newLength, intervalEnd + newLength - oldLength);
    }

    if (intervalEnd >= offset && intervalEnd <= offset + oldLength && intervalStart < offset) {
      return new UnfairTextRange(intervalStart, offset);
    }

    return null;
  }

  // Variant of applyChange for a zero-length (caret-like) marker.
  @Nullable
  private static TextRange processIfOnePoint(@NotNull DocumentEvent e, int intervalStart, boolean greedyRight, boolean stickyRight) {
    int offset = e.getOffset();
    int oldLength = e.getOldLength();
    int oldEnd = offset + oldLength;
    // the point was strictly inside the replaced region: marker dies
    if (offset < intervalStart && intervalStart < oldEnd) {
      return null;
    }

    if (offset == intervalStart && oldLength == 0) {
      if (greedyRight) {
        // absorb the inserted text
        return new UnfairTextRange(intervalStart, intervalStart + e.getNewLength());
      }
      else if (stickyRight) {
        // jump past the inserted text
        return new UnfairTextRange(intervalStart + e.getNewLength(), intervalStart + e.getNewLength());
      }
    }

    if (intervalStart > oldEnd || intervalStart == oldEnd && oldLength > 0) {
      return new UnfairTextRange(intervalStart + e.getNewLength() - oldLength, intervalStart + e.getNewLength() - oldLength);
    }

    return new UnfairTextRange(intervalStart, intervalStart);
  }

  @Override
  @NonNls
  public String toString() {
    return "RangeMarker" + (isGreedyToLeft() ? "[" : "(") + (isValid() ? "" : "invalid:") + getStartOffset() + "," + getEndOffset() +
           (isGreedyToRight() ? "]" : ")") + " " + getId();
  }

  int setIntervalStart(int start) {
    if (start < 0) {
      LOG.error("Negative start: " + start);
    }
    return myNode.setIntervalStart(start);
  }

  int setIntervalEnd(int end) {
    if (end < 0) {
      LOG.error("Negative end: "+end);
    }
    return myNode.setIntervalEnd(end);
  }

  @Override
  public boolean isValid() {
    RangeMarkerTree.RMNode<?> node = myNode;
    if (node == null || !node.isValid()) return false;
    Object file = myDocumentOrFile;
    // a file-backed marker is only valid while a document could still be produced for the file
    return file instanceof Document || canHaveDocument((VirtualFile)file);
  }

  private static boolean canHaveDocument(@NotNull VirtualFile file) {
    Document document = FileDocumentManager.getInstance().getCachedDocument(file);
    if (document != null) return true;
    if (!file.isValid() || file.isDirectory() || isBinaryWithoutDecompiler(file)) return false;

    return !file.getFileType().isBinary() || !FileUtilRt.isTooLarge(file.getLength());
  }

  private static boolean isBinaryWithoutDecompiler(@NotNull VirtualFile file) {
    final FileType fileType = file.getFileType();
    return fileType.isBinary() && BinaryFileTypeDecompilers.getInstance().forFileType(fileType) == null;
  }

  // Returns true when there is no node or the node accepted the new validity flag.
  public boolean setValid(boolean value) {
    RangeMarkerTree.RMNode<?> node = myNode;
    return node == null || node.setValid(value);
  }

  // Raw (tree-relative) interval start; -1 when the marker has no node.
  public int intervalStart() {
    RangeMarkerTree.RMNode<?> node = myNode;
    if (node == null) {
      return -1;
    }
    return node.intervalStart();
  }

  // Raw (tree-relative) interval end; -1 when the marker has no node.
  public int intervalEnd() {
    RangeMarkerTree.RMNode<?> node = myNode;
    if (node == null) {
      return -1;
    }
    return node.intervalEnd();
  }

  public RangeMarker findRangeMarkerAfter() {
    return myNode.getTree().findRangeMarkerAfter(this);
  }

  public RangeMarker findRangeMarkerBefore() {
    return myNode.getTree().findRangeMarkerBefore(this);
  }
}
/*
 *  Licensed to GraphHopper GmbH under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper GmbH licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.storage;

import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteOrder;
import java.util.Arrays;

/**
 * This is an in-memory byte-based data structure with the possibility to be stored on flush().
 * Thread safe.
 * <p>
 *
 * @author Peter Karich
 */
public class RAMDataAccess extends AbstractDataAccess {
    // backing storage: fixed-size segments of 1 << segmentSizePower bytes each
    private byte[][] segments = new byte[0][];
    // when true, flush() persists the segments to disk and loadExisting() reads them back
    private boolean store;

    RAMDataAccess(String name, String location, boolean store, ByteOrder order) {
        super(name, location, order);
        this.store = store;
    }

    /**
     * @param store true if in-memory data should be saved when calling flush
     */
    public RAMDataAccess store(boolean store) {
        this.store = store;
        return this;
    }

    @Override
    public boolean isStoring() {
        return store;
    }

    @Override
    public DataAccess copyTo(DataAccess da) {
        if (da instanceof RAMDataAccess) {
            copyHeader(da);
            RAMDataAccess rda = (RAMDataAccess) da;
            // TODO PERFORMANCE we could reuse rda segments!
            rda.segments = new byte[segments.length][];
            for (int i = 0; i < segments.length; i++) {
                byte[] area = segments[i];
                rda.segments[i] = Arrays.copyOf(area, area.length);
            }
            rda.setSegmentSize(segmentSizeInBytes);
            // leave id, store and close unchanged
            return da;
        } else {
            return super.copyTo(da);
        }
    }

    @Override
    public RAMDataAccess create(long bytes) {
        if (segments.length > 0)
            // BUG FIX: was IllegalThreadStateException, which has nothing to do
            // with threads; use IllegalStateException, consistent with loadExisting().
            throw new IllegalStateException("already created");

        // initialize transient values
        setSegmentSize(segmentSizeInBytes);
        ensureCapacity(Math.max(10 * 4, bytes));
        return this;
    }

    @Override
    public boolean ensureCapacity(long bytes) {
        if (bytes < 0)
            // BUG FIX: message said "strictly positive" but the check allows 0
            throw new IllegalArgumentException("new capacity has to be non-negative");

        long cap = getCapacity();
        long newBytes = bytes - cap;
        if (newBytes <= 0)
            return false;

        // number of additional segments, rounding the last partial segment up
        int segmentsToCreate = (int) (newBytes / segmentSizeInBytes);
        if (newBytes % segmentSizeInBytes != 0)
            segmentsToCreate++;

        try {
            byte[][] newSegs = Arrays.copyOf(segments, segments.length + segmentsToCreate);
            for (int i = segments.length; i < newSegs.length; i++) {
                newSegs[i] = new byte[1 << segmentSizePower];
            }
            segments = newSegs;
        } catch (OutOfMemoryError err) {
            throw new OutOfMemoryError(err.getMessage() + " - problem when allocating new memory. Old capacity: "
                    + cap + ", new bytes:" + newBytes + ", segmentSizeIntsPower:" + segmentSizePower
                    + ", new segments:" + segmentsToCreate + ", existing:" + segments.length);
        }
        return true;
    }

    @Override
    public boolean loadExisting() {
        if (segments.length > 0)
            throw new IllegalStateException("already initialized");

        if (isClosed())
            throw new IllegalStateException("already closed");

        if (!store)
            return false;

        File file = new File(getFullName());
        if (!file.exists() || file.length() == 0)
            return false;

        try {
            RandomAccessFile raFile = new RandomAccessFile(getFullName(), "r");
            try {
                long byteCount = readHeader(raFile) - HEADER_OFFSET;
                if (byteCount < 0)
                    return false;

                raFile.seek(HEADER_OFFSET);
                // raFile.readInt() <- too slow
                int segmentCount = (int) (byteCount / segmentSizeInBytes);
                if (byteCount % segmentSizeInBytes != 0)
                    segmentCount++;

                segments = new byte[segmentCount][];
                for (int s = 0; s < segmentCount; s++) {
                    byte[] bytes = new byte[segmentSizeInBytes];
                    int read = raFile.read(bytes);
                    if (read <= 0)
                        throw new IllegalStateException("segment " + s + " is empty? " + toString());

                    segments[s] = bytes;
                }
                return true;
            } finally {
                raFile.close();
            }
        } catch (IOException ex) {
            throw new RuntimeException("Problem while loading " + getFullName(), ex);
        }
    }

    @Override
    public void flush() {
        if (closed)
            throw new IllegalStateException("already closed");

        if (!store)
            return;

        try {
            RandomAccessFile raFile = new RandomAccessFile(getFullName(), "rw");
            try {
                long len = getCapacity();
                writeHeader(raFile, len, segmentSizeInBytes);
                raFile.seek(HEADER_OFFSET);
                // raFile.writeInt() <- too slow, so copy into byte array
                for (int s = 0; s < segments.length; s++) {
                    byte[] area = segments[s];
                    raFile.write(area);
                }
            } finally {
                raFile.close();
            }
        } catch (Exception ex) {
            throw new RuntimeException("Couldn't store bytes to " + toString(), ex);
        }
    }

    @Override
    public final void setInt(long bytePos, int value) {
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        assert index + 4 <= segmentSizeInBytes : "integer cannot be distributed over two segments";
        bitUtil.fromInt(segments[bufferIndex], value, index);
    }

    @Override
    public final int getInt(long bytePos) {
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        assert index + 4 <= segmentSizeInBytes : "integer cannot be distributed over two segments";
        // BUG FIX: was "bufferIndex > segments.length" — off by one, so the very
        // first out-of-range index (== segments.length) crashed without this diagnostic.
        if (bufferIndex >= segments.length) {
            LoggerFactory.getLogger(getClass()).error(getName() + ", segments:" + segments.length
                    + ", bufIndex:" + bufferIndex + ", bytePos:" + bytePos
                    + ", segPower:" + segmentSizePower);
        }
        return bitUtil.toInt(segments[bufferIndex], index);
    }

    @Override
    public final void setShort(long bytePos, short value) {
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        assert index + 2 <= segmentSizeInBytes : "integer cannot be distributed over two segments";
        bitUtil.fromShort(segments[bufferIndex], value, index);
    }

    @Override
    public final short getShort(long bytePos) {
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        assert index + 2 <= segmentSizeInBytes : "integer cannot be distributed over two segments";
        return bitUtil.toShort(segments[bufferIndex], index);
    }

    @Override
    public void setBytes(long bytePos, byte[] values, int length) {
        assert length <= segmentSizeInBytes : "the length has to be smaller or equal to the segment size: "
                + length + " vs. " + segmentSizeInBytes;
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        byte[] seg = segments[bufferIndex];
        // a write may straddle two adjacent segments; split it at the boundary
        int delta = index + length - segmentSizeInBytes;
        if (delta > 0) {
            length -= delta;
            System.arraycopy(values, 0, seg, index, length);
            seg = segments[bufferIndex + 1];
            System.arraycopy(values, length, seg, 0, delta);
        } else {
            System.arraycopy(values, 0, seg, index, length);
        }
    }

    @Override
    public void getBytes(long bytePos, byte[] values, int length) {
        assert length <= segmentSizeInBytes : "the length has to be smaller or equal to the segment size: "
                + length + " vs. " + segmentSizeInBytes;
        assert segmentSizePower > 0 : "call create or loadExisting before usage!";
        int bufferIndex = (int) (bytePos >>> segmentSizePower);
        int index = (int) (bytePos & indexDivisor);
        byte[] seg = segments[bufferIndex];
        // a read may straddle two adjacent segments; split it at the boundary
        int delta = index + length - segmentSizeInBytes;
        if (delta > 0) {
            length -= delta;
            System.arraycopy(seg, index, values, 0, length);
            seg = segments[bufferIndex + 1];
            System.arraycopy(seg, 0, values, length, delta);
        } else {
            System.arraycopy(seg, index, values, 0, length);
        }
    }

    @Override
    public void close() {
        super.close();
        segments = new byte[0][];
        closed = true;
    }

    @Override
    public long getCapacity() {
        return (long) getSegments() * segmentSizeInBytes;
    }

    @Override
    public int getSegments() {
        return segments.length;
    }

    @Override
    public void trimTo(long capacity) {
        if (capacity > getCapacity()) {
            throw new IllegalStateException("Cannot increase capacity (" + getCapacity() + ") to " + capacity
                    + " via trimTo. Use ensureCapacity instead. ");
        }

        if (capacity < segmentSizeInBytes)
            capacity = segmentSizeInBytes;
        int remainingSegments = (int) (capacity / segmentSizeInBytes);
        if (capacity % segmentSizeInBytes != 0) {
            remainingSegments++;
        }

        segments = Arrays.copyOf(segments, remainingSegments);
    }

    @Override
    public void rename(String newName) {
        if (!checkBeforeRename(newName)) {
            return;
        }
        if (store) {
            super.rename(newName);
        }

        // in every case set the name
        name = newName;
    }

    @Override
    public DAType getType() {
        if (isStoring())
            return DAType.RAM_STORE;
        return DAType.RAM;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.server;

import com.facebook.presto.dispatcher.DispatchManager;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.QueryState;
import com.facebook.presto.execution.StageId;
import com.facebook.presto.metadata.InternalNode;
import com.facebook.presto.metadata.InternalNodeManager;
import com.facebook.presto.resourcemanager.ResourceManagerProxy;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.QueryId;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;

import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.Optional;

import static com.facebook.presto.connector.system.KillQueryProcedure.createKillQueryException;
import static com.facebook.presto.connector.system.KillQueryProcedure.createPreemptQueryException;
import static com.facebook.presto.execution.QueryState.FAILED;
import static com.facebook.presto.execution.QueryState.QUEUED;
import static com.facebook.presto.execution.QueryState.RUNNING;
import static com.facebook.presto.server.security.RoleType.ADMIN;
import static com.facebook.presto.server.security.RoleType.USER;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO;
import static java.lang.String.format;
import static java.util.Comparator.comparing;
import static java.util.Comparator.comparingInt;
import static java.util.Objects.requireNonNull;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.NO_CONTENT;
import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE;

/**
 * Manage queries scheduled on this node
 */
@Path("/v1/query")
@RolesAllowed({USER, ADMIN})
public class QueryResource
{
    /**
     * Orders queries for display: RUNNING first, then QUEUED, then other
     * non-terminal states, then FAILED, then remaining terminal states;
     * within each group the most recently created query comes first.
     */
    public static final Comparator<BasicQueryInfo> QUERIES_ORDERING = Ordering
            .<BasicQueryInfo>from(comparingInt(
                    basicQueryInfo -> {
                        if (basicQueryInfo.getState() == RUNNING) {
                            return 0;
                        }
                        else if (basicQueryInfo.getState() == QUEUED) {
                            return 1;
                        }
                        else if (!basicQueryInfo.getState().isDone()) {
                            return 2;
                        }
                        else if (basicQueryInfo.getState() == FAILED) {
                            return 3;
                        }
                        else {
                            return 4;
                        }
                    }))
            .compound(Collections.reverseOrder(comparing(item -> item.getQueryStats().getCreateTime())));

    // TODO There should be a combined interface for this
    private final boolean resourceManagerEnabled;
    private final DispatchManager dispatchManager;
    private final QueryManager queryManager;
    private final InternalNodeManager internalNodeManager;
    private final Optional<ResourceManagerProxy> proxyHelper;

    @Inject
    public QueryResource(
            ServerConfig serverConfig,
            DispatchManager dispatchManager,
            QueryManager queryManager,
            InternalNodeManager internalNodeManager,
            Optional<ResourceManagerProxy> proxyHelper)
    {
        this.resourceManagerEnabled = requireNonNull(serverConfig, "serverConfig is null").isResourceManagerEnabled();
        this.dispatchManager = requireNonNull(dispatchManager, "dispatchManager is null");
        this.queryManager = requireNonNull(queryManager, "queryManager is null");
        this.internalNodeManager = requireNonNull(internalNodeManager, "internalNodeManager is null");
        this.proxyHelper = requireNonNull(proxyHelper, "proxyHelper is null");
    }

    /**
     * Returns basic info for queries known to this node, optionally filtered
     * by state and truncated to {@code limit} entries (most relevant first).
     * When the resource manager is enabled the request is proxied there.
     *
     * @throws WebApplicationException with 400 if {@code limit} is not positive
     */
    @GET
    public void getAllQueryInfo(
            @QueryParam("state") String stateFilter,
            @QueryParam("limit") Integer limitFilter,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        if (resourceManagerEnabled) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        int limit = firstNonNull(limitFilter, Integer.MAX_VALUE);
        if (limit <= 0) {
            throw new WebApplicationException(Response
                    .status(BAD_REQUEST)
                    .type(MediaType.TEXT_PLAIN)
                    .entity(format("Parameter 'limit' for getAllQueryInfo must be positive. Got %d.", limit))
                    .build());
        }

        List<BasicQueryInfo> queries = new ArrayList<>(dispatchManager.getQueries());

        // Filter list by the query state (if specified): keep only the queries
        // in the requested state. (Using "!=" here — removeIf with "==" would
        // discard exactly the matching queries and return everything else.)
        if (stateFilter != null) {
            QueryState expectedState = QueryState.valueOf(stateFilter.toUpperCase(Locale.ENGLISH));
            queries.removeIf(item -> item.getState() != expectedState);
        }

        // If limit is smaller than number of queries, then ensure that the more recent items are at the front.
        if (limit < queries.size()) {
            queries.sort(QUERIES_ORDERING);
        }
        else {
            limit = queries.size();
        }

        ImmutableList.Builder<BasicQueryInfo> builder = new ImmutableList.Builder<>();
        builder.addAll(queries.subList(0, limit));
        asyncResponse.resume(Response.ok(builder.build()).build());
    }

    /**
     * Returns full info for a single query, falling back to basic dispatch
     * info when full info is unavailable, and 410 GONE when unknown.
     * Unknown queries are proxied to the resource manager when it is enabled.
     */
    @GET
    @Path("{queryId}")
    public void getQueryInfo(
            @PathParam("queryId") QueryId queryId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        try {
            QueryInfo queryInfo = queryManager.getFullQueryInfo(queryId);
            asyncResponse.resume(Response.ok(queryInfo).build());
        }
        catch (NoSuchElementException e) {
            try {
                BasicQueryInfo basicQueryInfo = dispatchManager.getQueryInfo(queryId);
                asyncResponse.resume(Response.ok(basicQueryInfo).build());
            }
            catch (NoSuchElementException ex) {
                asyncResponse.resume(Response.status(Status.GONE).build());
            }
        }
    }

    /**
     * Cancels the given query (no-op result codes come from the dispatcher);
     * responds 204 NO_CONTENT. Unknown queries are proxied to the resource
     * manager when it is enabled.
     */
    @DELETE
    @Path("{queryId}")
    public void cancelQuery(
            @PathParam("queryId") QueryId queryId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        dispatchManager.cancelQuery(queryId);
        asyncResponse.resume(Response.status(NO_CONTENT).build());
    }

    /**
     * Fails the query with a "killed by user" error carrying {@code message}.
     * Unknown queries are proxied to the resource manager when it is enabled.
     */
    @PUT
    @Path("{queryId}/killed")
    public void killQuery(
            @PathParam("queryId") QueryId queryId,
            String message,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        asyncResponse.resume(failQuery(queryId, createKillQueryException(message)));
    }

    /**
     * Fails the query with a "preempted" error carrying {@code message}.
     * Unknown queries are proxied to the resource manager when it is enabled.
     * (The resourceManagerEnabled guard matches the other endpoints; without
     * it, an unknown query would hit checkState(proxyHelper.isPresent()) and
     * blow up when the resource manager is disabled.)
     */
    @PUT
    @Path("{queryId}/preempted")
    public void preemptQuery(
            @PathParam("queryId") QueryId queryId,
            String message,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        asyncResponse.resume(failQuery(queryId, createPreemptQueryException(message)));
    }

    /**
     * Fails a local query with the given exception.
     *
     * @return 200 OK when the query was failed with this exception,
     *         409 CONFLICT when the query already finished or failed with a
     *         different error (we lost the race), 410 GONE when unknown
     */
    private Response failQuery(QueryId queryId, PrestoException queryException)
    {
        requireNonNull(queryId, "queryId is null");

        try {
            BasicQueryInfo state = dispatchManager.getQueryInfo(queryId);

            // check before killing to provide the proper error code (this is racy)
            if (state.getState().isDone()) {
                return Response.status(Status.CONFLICT).build();
            }

            dispatchManager.failQuery(queryId, queryException);

            // verify if the query was failed (if not, we lost the race)
            if (!queryException.getErrorCode().equals(dispatchManager.getQueryInfo(queryId).getErrorCode())) {
                return Response.status(Status.CONFLICT).build();
            }

            return Response.status(Status.OK).build();
        }
        catch (NoSuchElementException e) {
            return Response.status(Status.GONE).build();
        }
    }

    /**
     * Cancels a single stage of a query. Unknown queries are proxied to the
     * resource manager when it is enabled (guard added for consistency with
     * the query-level endpoints; see preemptQuery).
     */
    @DELETE
    @Path("stage/{stageId}")
    public void cancelStage(
            @PathParam("stageId") StageId stageId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(stageId, "stageId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(stageId.getQueryId())) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        queryManager.cancelStage(stageId);
        asyncResponse.resume(Response.ok().build());
    }

    /**
     * Forwards the current request to a resource manager node, resuming the
     * async response with 503 when none is available. Requires proxyHelper to
     * be present (i.e. the resource manager must be enabled).
     */
    private void proxyResponse(HttpServletRequest servletRequest, AsyncResponse asyncResponse, String xForwardedProto, UriInfo uriInfo)
    {
        try {
            checkState(proxyHelper.isPresent());
            Iterator<InternalNode> resourceManagers = internalNodeManager.getResourceManagers().iterator();
            if (!resourceManagers.hasNext()) {
                asyncResponse.resume(Response.status(SERVICE_UNAVAILABLE).build());
                return;
            }
            InternalNode resourceManagerNode = resourceManagers.next();
            URI uri = uriInfo.getRequestUriBuilder()
                    .scheme(resourceManagerNode.getInternalUri().getScheme())
                    .host(resourceManagerNode.getHostAndPort().toInetAddress().getHostName())
                    .port(resourceManagerNode.getInternalUri().getPort())
                    .build();
            proxyHelper.get().performRequest(servletRequest, asyncResponse, uri);
        }
        catch (Exception e) {
            asyncResponse.resume(e);
        }
    }
}
package org.hisp.dhis.system.util;

/*
 * Copyright (c) 2004-2015, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;

/**
 * Utility methods for string handling.
 *
 * @author Lars Helge Overland
 */
public class TextUtils
{
    public static final TextUtils INSTANCE = new TextUtils();
    public static final String EMPTY = "";
    public static final String SPACE = " ";
    public static final String SEP = "-";
    public static final String LN = System.getProperty( "line.separator" );

    private static final Pattern LINK_PATTERN = Pattern.compile( "((http://|https://|www\\.).+?)($|\\n|\\r|\\r\\n| )" );
    private static final String DELIMITER = ", ";

    /**
     * Performs the htmlNewline(String) and htmlLinks(String) methods against
     * the given text.
     *
     * @param text the text to substitute.
     * @return the substituted text.
     */
    public static String htmlify( String text )
    {
        text = htmlLinks( text );
        text = htmlNewline( text );
        return text;
    }

    /**
     * Substitutes links in the given text with valid HTML mark-up. For instance,
     * http://dhis2.org is replaced with <a href="http://dhis2.org">http://dhis2.org</a>,
     * and www.dhis2.org is replaced with <a href="http://dhis2.org">www.dhis2.org</a>.
     *
     * @param text the text to substitute links for.
     * @return the substituted text, or null if the given text is null or blank.
     */
    public static String htmlLinks( String text )
    {
        if ( text == null || text.trim().isEmpty() )
        {
            return null;
        }

        Matcher matcher = LINK_PATTERN.matcher( text );

        StringBuffer buffer = new StringBuffer();

        while ( matcher.find() )
        {
            String url = matcher.group( 1 );
            String suffix = matcher.group( 3 );

            // Prefix protocol-less www. links so the anchor resolves
            String ref = url.startsWith( "www." ) ? "http://" + url : url;

            url = "<a href=\"" + ref + "\">" + url + "</a>" + suffix;

            matcher.appendReplacement( buffer, url );
        }

        return matcher.appendTail( buffer ).toString();
    }

    /**
     * Replaces common newline characters like \n, \r, \r\n to the HTML line
     * break tag <br>.
     *
     * @param text the text to substitute.
     * @return the substituted text, or null if the given text is null or blank.
     */
    public static String htmlNewline( String text )
    {
        if ( text == null || text.trim().isEmpty() )
        {
            return null;
        }

        return text.replaceAll( "(\n|\r|\r\n)", "<br>" );
    }

    /**
     * Returns a list of tokens based on the given string.
     *
     * @param string the string.
     * @return the list of tokens, or null if the given string is null.
     */
    public static List<String> getTokens( String string )
    {
        if ( string == null )
        {
            return null;
        }

        return new ArrayList<>( Arrays.asList( string.split( "\\s" ) ) );
    }

    /**
     * Gets the sub string of the given string. If the beginIndex is larger than
     * the length of the string, the empty string is returned. If the beginIndex +
     * the length is larger than the length of the string, the part of the string
     * following the beginIndex is returned. Method is out-of-range safe.
     *
     * @param string the string.
     * @param beginIndex the zero-based begin index.
     * @param length the length of the sub string starting at the begin index.
     * @return the sub string of the given string.
     */
    public static String subString( String string, int beginIndex, int length )
    {
        if ( string == null )
        {
            return null;
        }

        final int endIndex = beginIndex + length;

        if ( beginIndex >= string.length() )
        {
            return EMPTY;
        }

        if ( endIndex > string.length() )
        {
            return string.substring( beginIndex, string.length() );
        }

        return string.substring( beginIndex, endIndex );
    }

    /**
     * Removes the last given number of characters from the given string. Returns
     * null if the string is null. Returns an empty string if characters is less
     * than zero or greater than the length of the string.
     *
     * @param string the string.
     * @param characters number of characters to remove.
     * @return the substring.
     */
    public static String removeLast( String string, int characters )
    {
        if ( string == null )
        {
            return null;
        }

        if ( characters < 0 || characters > string.length() )
        {
            return EMPTY;
        }

        return string.substring( 0, string.length() - characters );
    }

    /**
     * Removes the last occurence of the word "or" from the given string,
     * including potential trailing spaces, case-insentitive.
     *
     * @param string the string.
     * @return the chopped string.
     */
    public static String removeLastOr( String string )
    {
        string = StringUtils.stripEnd( string, " " );

        return StringUtils.removeEndIgnoreCase( string, "or" );
    }

    /**
     * Removes the last occurence of the word "and" from the given string,
     * including potential trailing spaces, case-insentitive.
     *
     * @param string the string.
     * @return the chopped string.
     */
    public static String removeLastAnd( String string )
    {
        string = StringUtils.stripEnd( string, " " );

        return StringUtils.removeEndIgnoreCase( string, "and" );
    }

    /**
     * Removes the last occurence of comma (",") from the given string,
     * including potential trailing spaces.
     *
     * @param string the string.
     * @return the chopped string.
     */
    public static String removeLastComma( String string )
    {
        string = StringUtils.stripEnd( string, " " );

        return StringUtils.removeEndIgnoreCase( string, "," );
    }

    /**
     * Trims the given string from the end.
     *
     * @param value the value to trim.
     * @param length the number of characters to trim.
     * @return the trimmed value, empty if given value is null or length is higher
     *         than the value length.
     */
    public static String trimEnd( String value, int length )
    {
        if ( value == null || length > value.length() )
        {
            return EMPTY;
        }

        return value.substring( 0, value.length() - length );
    }

    /**
     * Returns an empty string if the given argument is true, the string
     * otherwise. This is a convenience method.
     *
     * @param string the string.
     * @param emptyString whether to return an empty string.
     * @return a string.
     */
    public static String getString( String string, boolean emptyString )
    {
        return emptyString ? EMPTY : string;
    }

    /**
     * Transforms a collection of Integers into a comma delimited String. If the
     * given collection of elements are null or is empty, an empty String is
     * returned.
     *
     * @param elements the collection of Integers
     * @return a comma delimited String.
     */
    public static String getCommaDelimitedString( Collection<?> elements )
    {
        final StringBuilder builder = new StringBuilder();

        if ( elements != null && !elements.isEmpty() )
        {
            for ( Object element : elements )
            {
                builder.append( element.toString() ).append( DELIMITER );
            }

            return builder.substring( 0, builder.length() - DELIMITER.length() );
        }

        return builder.toString();
    }

    /**
     * Joins the elements of the provided array into a single String containing
     * the provided list of elements.
     *
     * @param list the list of objects to join.
     * @param separator the separator string.
     * @param nullReplacement the value to replace nulls in list with.
     * @return the joined string, or null if the given list is null.
     */
    public static <T> String join( List<T> list, String separator, T nullReplacement )
    {
        if ( list == null )
        {
            return null;
        }

        List<T> objects = new ArrayList<T>( list );

        if ( nullReplacement != null )
        {
            Collections.replaceAll( objects, null, nullReplacement );
        }

        return StringUtils.join( objects, separator );
    }

    /**
     * Transforms a collection of Integers into a comma delimited String. If the
     * given collection of elements are null or is empty, an empty String is
     * returned.
     *
     * @param delimitPrefix whether to prefix the string with a delimiter.
     * @param delimitSuffix whether to suffix the string with a delimiter.
     * @param elements the collection of Integers
     * @return a comma delimited String.
     */
    public static String getCommaDelimitedString( Collection<?> elements, boolean delimitPrefix, boolean delimitSuffix )
    {
        final StringBuilder builder = new StringBuilder();

        if ( elements != null && !elements.isEmpty() )
        {
            if ( delimitPrefix )
            {
                builder.append( DELIMITER );
            }

            builder.append( getCommaDelimitedString( elements ) );

            if ( delimitSuffix )
            {
                builder.append( DELIMITER );
            }
        }

        return builder.toString();
    }

    /**
     * Transforms a collection of strings into a comma delimited string, where
     * each component get single-quoted.
     *
     * @param elements the collection of Integers
     * @return a comma delimited String, or null if the given collection is
     *         null or empty.
     */
    public static String getQuotedCommaDelimitedString( Collection<String> elements )
    {
        if ( elements != null && elements.size() > 0 )
        {
            final StringBuffer buffer = new StringBuffer();

            for ( String element : elements )
            {
                buffer.append( "'" ).append( element ).append( "', " );
            }

            return buffer.substring( 0, buffer.length() - ", ".length() );
        }

        return null;
    }

    /**
     * Returns null if the given string is not null and contains no characters,
     * the string itself otherwise.
     *
     * @param string the string.
     * @return null if the given string is not null and contains no characters,
     *         the string itself otherwise.
     */
    public static String nullIfEmpty( String string )
    {
        return string != null && string.trim().length() == 0 ? null : string;
    }

    /**
     * Checks the two strings for equality.
     *
     * @param s1 string 1.
     * @param s2 string 2.
     * @return true if strings are equal, false otherwise.
     */
    public static boolean equalsNullSafe( String s1, String s2 )
    {
        return s1 == null ? s2 == null : s1.equals( s2 );
    }

    /**
     * Returns the string value of the given boolean. Returns null if argument
     * is null.
     *
     * @param value the boolean.
     * @return the string value.
     */
    public static String valueOf( Boolean value )
    {
        return value != null ? String.valueOf( value ) : null;
    }

    /**
     * Returns the boolean value of the given string. Returns null if argument
     * is null.
     *
     * @param value the string value.
     * @return the boolean.
     */
    public static Boolean valueOf( String value )
    {
        return value != null ? Boolean.valueOf( value ) : null;
    }

    /**
     * Null-safe method for converting the given string to lower-case.
     *
     * @param string the string.
     * @return the string in lower-case.
     */
    public static String lower( String string )
    {
        return string != null ? string.toLowerCase() : null;
    }

    /**
     * Null-safe method for writing the items of a string array out as a string
     * separated by the given char separator.
     *
     * @param array the array.
     * @param separator the separator of the array items.
     * @return a string.
     */
    public static String toString( String[] array, String separator )
    {
        StringBuilder builder = new StringBuilder();

        if ( array != null && array.length > 0 )
        {
            for ( String string : array )
            {
                builder.append( string ).append( separator );
            }

            // Remove the whole trailing separator, which may be longer than
            // one character (deleteCharAt(length - 1) only removed a single
            // character and left a partial separator behind).
            builder.setLength( builder.length() - separator.length() );
        }

        return builder.toString();
    }

    /**
     * Returns the string representation of the object, or null if the object is
     * null.
     *
     * @param object the object.
     * @return the string representation.
     */
    public static String toString( Object object )
    {
        return object != null ? object.toString() : null;
    }

    /**
     * Invokes append tail on matcher with the given string buffer, and returns
     * the string buffer as a string.
     *
     * @param matcher the matcher.
     * @param sb the string buffer.
     * @return a string.
     */
    public static String appendTail( Matcher matcher, StringBuffer sb )
    {
        matcher.appendTail( sb );
        return sb.toString();
    }
}
/* * Copyright (c) 2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.cluster.impl; import com.hazelcast.cluster.Joiner; import com.hazelcast.config.Config; import com.hazelcast.config.ServiceConfig; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.instance.AddressPicker; import com.hazelcast.instance.DefaultNodeExtension; import com.hazelcast.instance.MemberImpl; import com.hazelcast.instance.Node; import com.hazelcast.instance.NodeContext; import com.hazelcast.instance.NodeExtension; import com.hazelcast.internal.cluster.MemberInfo; import com.hazelcast.internal.cluster.impl.operations.MembersUpdateOp; import com.hazelcast.nio.Address; import com.hazelcast.nio.ConnectionManager; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.spi.Operation; import com.hazelcast.spi.OperationService; import com.hazelcast.spi.PostJoinAwareService; import com.hazelcast.spi.PreJoinAwareService; import com.hazelcast.spi.impl.NodeEngineImpl; import com.hazelcast.spi.properties.GroupProperty; import com.hazelcast.test.AssertTask; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.RequireAssertEnabled; import com.hazelcast.test.TestHazelcastInstanceFactory; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Test; import 
org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.io.IOException; import java.nio.channels.ServerSocketChannel; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.concurrent.locks.LockSupport; import static com.hazelcast.instance.HazelcastInstanceFactory.newHazelcastInstance; import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.FINALIZE_JOIN; import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.F_ID; import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.MEMBER_INFO_UPDATE; import static com.hazelcast.spi.properties.GroupProperty.MEMBER_LIST_PUBLISH_INTERVAL_SECONDS; import static com.hazelcast.test.PacketFiltersUtil.delayOperationsFrom; import static com.hazelcast.test.PacketFiltersUtil.dropOperationsBetween; import static com.hazelcast.test.PacketFiltersUtil.dropOperationsFrom; import static com.hazelcast.test.PacketFiltersUtil.resetPacketFiltersFrom; import static com.hazelcast.util.UuidUtil.newUnsecureUuidString; import static java.lang.Thread.currentThread; import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class MembershipUpdateTest extends HazelcastTestSupport { private TestHazelcastInstanceFactory factory; @Before public void init() { factory = createHazelcastInstanceFactory(); } @Test public void 
sequential_member_join() { HazelcastInstance[] instances = new HazelcastInstance[4]; for (int i = 0; i < instances.length; i++) { instances[i] = factory.newHazelcastInstance(); } for (HazelcastInstance instance : instances) { assertClusterSizeEventually(instances.length, instance); } MemberMap referenceMemberMap = getMemberMap(instances[0]); // version = number of started members assertEquals(instances.length, referenceMemberMap.getVersion()); for (HazelcastInstance instance : instances) { MemberMap memberMap = getMemberMap(instance); assertMemberViewsAreSame(referenceMemberMap, memberMap); } } @Test public void parallel_member_join() { final AtomicReferenceArray<HazelcastInstance> instances = new AtomicReferenceArray<HazelcastInstance>(4); for (int i = 0; i < instances.length(); i++) { final int ix = i; spawn(new Runnable() { @Override public void run() { instances.set(ix, factory.newHazelcastInstance()); } }); } assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertNotNull(instance); assertClusterSize(instances.length(), instance); } } }); MemberMap referenceMemberMap = getMemberMap(instances.get(0)); // version = number of started members assertEquals(instances.length(), referenceMemberMap.getVersion()); for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); MemberMap memberMap = getMemberMap(instance); assertMemberViewsAreSame(referenceMemberMap, memberMap); } } @Test public void parallel_member_join_whenPostJoinOperationPresent() throws InterruptedException { CountDownLatch latch = new CountDownLatch(1); final Config config = getConfigWithService(new PostJoinAwareServiceImpl(latch), PostJoinAwareServiceImpl.SERVICE_NAME); final AtomicReferenceArray<HazelcastInstance> instances = new AtomicReferenceArray<HazelcastInstance>(6); for (int i = 0; i < instances.length(); i++) { final int ix = i; 
spawn(new Runnable() { @Override public void run() { instances.set(ix, factory.newHazelcastInstance(config)); } }); } // just a random latency sleepSeconds(3); latch.countDown(); assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertNotNull(instance); assertClusterSize(instances.length(), instance); } } }); } @Test public void parallel_member_join_whenPreJoinOperationPresent() throws InterruptedException { CountDownLatch latch = new CountDownLatch(1); PreJoinAwareServiceImpl service = new PreJoinAwareServiceImpl(latch); final Config config = getConfigWithService(service, PreJoinAwareServiceImpl.SERVICE_NAME); final AtomicReferenceArray<HazelcastInstance> instances = new AtomicReferenceArray<HazelcastInstance>(6); for (int i = 0; i < instances.length(); i++) { final int ix = i; spawn(new Runnable() { @Override public void run() { instances.set(ix, factory.newHazelcastInstance(config)); } }); } sleepSeconds(3); latch.countDown(); assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertNotNull(instance); assertClusterSize(instances.length(), instance); } } }); } @Test public void sequential_member_join_and_removal() { HazelcastInstance[] instances = new HazelcastInstance[4]; for (int i = 0; i < instances.length; i++) { instances[i] = factory.newHazelcastInstance(); } for (HazelcastInstance instance : instances) { assertClusterSizeEventually(instances.length, instance); } instances[instances.length - 1].shutdown(); for (int i = 0; i < instances.length - 1; i++) { HazelcastInstance instance = instances[i]; assertClusterSizeEventually(instances.length - 1, instance); } MemberMap referenceMemberMap = getMemberMap(instances[0]); // version = number of started members + 1 removal assertEquals(instances.length + 1, 
referenceMemberMap.getVersion()); for (int i = 0; i < instances.length - 1; i++) { HazelcastInstance instance = instances[i]; MemberMap memberMap = getMemberMap(instance); assertMemberViewsAreSame(referenceMemberMap, memberMap); } } @Test public void sequential_member_join_and_restart() { HazelcastInstance[] instances = new HazelcastInstance[3]; for (int i = 0; i < instances.length; i++) { instances[i] = factory.newHazelcastInstance(); } for (HazelcastInstance instance : instances) { assertClusterSizeEventually(instances.length, instance); } instances[instances.length - 1].shutdown(); instances[instances.length - 1] = factory.newHazelcastInstance(); for (HazelcastInstance instance : instances) { assertClusterSizeEventually(instances.length, instance); } MemberMap referenceMemberMap = getMemberMap(instances[0]); // version = number of started members + 1 removal + 1 start assertEquals(instances.length + 2, referenceMemberMap.getVersion()); for (HazelcastInstance instance : instances) { MemberMap memberMap = getMemberMap(instance); assertMemberViewsAreSame(referenceMemberMap, memberMap); } } @Test public void parallel_member_join_and_removal() { final AtomicReferenceArray<HazelcastInstance> instances = new AtomicReferenceArray<HazelcastInstance>(4); for (int i = 0; i < instances.length(); i++) { final int ix = i; spawn(new Runnable() { @Override public void run() { instances.set(ix, factory.newHazelcastInstance()); } }); } assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertNotNull(instance); assertClusterSize(instances.length(), instance); } } }); for (int i = 0; i < instances.length(); i++) { if (getNode(instances.get(i)).isMaster()) { continue; } instances.getAndSet(i, null).shutdown(); break; } for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); if (instance != null) { 
assertClusterSizeEventually(instances.length() - 1, instance); } } HazelcastInstance master = null; for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); if (instance != null && getNode(instance).isMaster()) { master = instance; break; } } assertNotNull(master); MemberMap referenceMemberMap = getMemberMap(master); // version = number of started members + 1 removal assertEquals(instances.length() + 1, referenceMemberMap.getVersion()); for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); if (instance != null) { MemberMap memberMap = getMemberMap(instance); assertMemberViewsAreSame(referenceMemberMap, memberMap); } } } @Test public void parallel_member_join_and_restart() { final AtomicReferenceArray<HazelcastInstance> instances = new AtomicReferenceArray<HazelcastInstance>(3); for (int i = 0; i < instances.length(); i++) { final int ix = i; spawn(new Runnable() { @Override public void run() { instances.set(ix, factory.newHazelcastInstance()); } }); } assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertNotNull(instance); assertClusterSize(instances.length(), instance); } } }); for (int i = 0; i < instances.length(); i++) { if (getNode(instances.get(i)).isMaster()) { continue; } instances.get(i).shutdown(); instances.set(i, factory.newHazelcastInstance()); break; } for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); assertClusterSizeEventually(instances.length(), instance); } HazelcastInstance master = null; for (int i = 0; i < instances.length(); i++) { HazelcastInstance instance = instances.get(i); if (getNode(instances.get(i)).isMaster()) { master = instance; break; } } assertNotNull(master); MemberMap referenceMemberMap = getMemberMap(master); // version = number of started members + 1 removal + 1 start 
assertEquals(instances.length() + 2, referenceMemberMap.getVersion());
for (int i = 0; i < instances.length(); i++) {
    HazelcastInstance instance = instances.get(i);
    MemberMap memberMap = getMemberMap(instance);
    assertMemberViewsAreSame(referenceMemberMap, memberMap);
}
}

// Drops MEMBER_INFO_UPDATE from the master so hz2 misses hz3's join; an
// explicit sendMemberListToMember(...) push must bring hz2 back in sync.
@Test
public void memberListsConverge_whenMemberUpdateMissed() {
    Config config = new Config();
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
    HazelcastInstance hz2 = factory.newHazelcastInstance(config);
    assertClusterSize(2, hz1, hz2);
    dropOperationsFrom(hz1, F_ID, singletonList(MEMBER_INFO_UPDATE));
    HazelcastInstance hz3 = factory.newHazelcastInstance(config);
    assertClusterSize(3, hz1, hz3);
    assertClusterSize(2, hz2);
    resetPacketFiltersFrom(hz1);
    ClusterServiceImpl clusterService = (ClusterServiceImpl) getClusterService(hz1);
    clusterService.getMembershipManager().sendMemberListToMember(getAddress(hz2));
    assertClusterSizeEventually(3, hz2);
    MemberMap referenceMemberMap = getMemberMap(hz1);
    assertMemberViewsAreSame(referenceMemberMap, getMemberMap(hz2));
    assertMemberViewsAreSame(referenceMemberMap, getMemberMap(hz3));
}

// Same missed-update scenario, but relies on the periodic member-list
// publication (5 second interval) instead of an explicit push.
@Test
public void memberListsConverge_whenMemberUpdateMissed_withPeriodicUpdates() {
    Config config = new Config();
    config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
    HazelcastInstance hz2 = factory.newHazelcastInstance(config);
    assertClusterSize(2, hz1, hz2);
    dropOperationsFrom(hz1, F_ID, singletonList(MEMBER_INFO_UPDATE));
    HazelcastInstance hz3 = factory.newHazelcastInstance(config);
    assertClusterSize(3, hz1, hz3);
    assertClusterSize(2, hz2);
    resetPacketFiltersFrom(hz1);
    assertClusterSizeEventually(3, hz2);
    MemberMap referenceMemberMap = getMemberMap(hz1);
    assertMemberViewsAreSame(referenceMemberMap, getMemberMap(hz2));
    assertMemberViewsAreSame(referenceMemberMap, getMemberMap(hz3));
}

// Delays membership updates from the master so they arrive out of order;
// member lists must still converge (continues on the next line).
@Test
public void memberListsConverge_whenMembershipUpdatesSent_outOfOrder() {
    Config config = new
Config();
config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "1");
HazelcastInstance hz1 = factory.newHazelcastInstance(config);
// Delay membership updates so later members may receive them out of order.
delayOperationsFrom(hz1, F_ID, singletonList(MEMBER_INFO_UPDATE));
HazelcastInstance hz2 = factory.newHazelcastInstance(config);
HazelcastInstance hz3 = factory.newHazelcastInstance(config);
HazelcastInstance hz4 = factory.newHazelcastInstance(config);
HazelcastInstance hz5 = factory.newHazelcastInstance(config);
HazelcastInstance[] instances = new HazelcastInstance[]{hz1, hz2, hz3, hz4, hz5};
for (HazelcastInstance instance : instances) {
    assertClusterSizeEventually(5, instance);
}
MemberMap referenceMemberMap = getMemberMap(hz1);
for (HazelcastInstance instance : instances) {
    assertMemberViewsAreSame(referenceMemberMap, getMemberMap(instance));
}
}

// Delays both FINALIZE_JOIN and MEMBER_INFO_UPDATE from the master so join
// finalization and membership updates interleave; lists must still converge.
@Test
public void memberListsConverge_whenFinalizeJoinAndMembershipUpdatesSent_outOfOrder() {
    Config config = new Config();
    config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "1");
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
    delayOperationsFrom(hz1, F_ID, asList(MEMBER_INFO_UPDATE, FINALIZE_JOIN));
    HazelcastInstance hz2 = factory.newHazelcastInstance(config);
    HazelcastInstance hz3 = factory.newHazelcastInstance(config);
    HazelcastInstance hz4 = factory.newHazelcastInstance(config);
    HazelcastInstance hz5 = factory.newHazelcastInstance(config);
    HazelcastInstance[] instances = new HazelcastInstance[]{hz1, hz2, hz3, hz4, hz5};
    for (HazelcastInstance instance : instances) {
        assertClusterSizeEventually(5, instance);
    }
    MemberMap referenceMemberMap = getMemberMap(hz1);
    for (HazelcastInstance instance : instances) {
        assertMemberViewsAreSame(referenceMemberMap, getMemberMap(instance));
    }
}

// hz3 misses hz2's removal (update dropped); the periodic member-list
// publication (1 second interval) must re-sync it (continues on next line).
@Test
public void memberListsConverge_whenExistingMemberMissesMemberRemove_withPeriodicUpdates() {
    Config config = new Config();
    config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "1");
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
HazelcastInstance hz2 = factory.newHazelcastInstance(config);
HazelcastInstance hz3 = factory.newHazelcastInstance(config);
assertClusterSize(3, hz1, hz3);
assertClusterSizeEventually(3, hz2);
// hz3 will not hear about hz2's termination until filters are reset.
dropOperationsBetween(hz1, hz3, F_ID, singletonList(MEMBER_INFO_UPDATE));
hz2.getLifecycleService().terminate();
assertClusterSizeEventually(2, hz1);
assertClusterSize(3, hz3);
resetPacketFiltersFrom(hz1);
assertClusterSizeEventually(2, hz3);
assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz3));
}

// hz3 misses hz2's removal; convergence is triggered by a brand new member
// (hz4) joining rather than by periodic publication (interval is MAX_VALUE).
@Test
public void memberListsConverge_whenExistingMemberMissesMemberRemove_afterNewMemberJoins() {
    Config config = new Config();
    config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
    HazelcastInstance hz2 = factory.newHazelcastInstance(config);
    HazelcastInstance hz3 = factory.newHazelcastInstance(config);
    assertClusterSize(3, hz1, hz3);
    assertClusterSizeEventually(3, hz2);
    dropOperationsBetween(hz1, hz3, F_ID, singletonList(MEMBER_INFO_UPDATE));
    hz2.getLifecycleService().terminate();
    assertClusterSizeEventually(2, hz1);
    assertClusterSize(3, hz3);
    resetPacketFiltersFrom(hz1);
    HazelcastInstance hz4 = factory.newHazelcastInstance(config);
    assertClusterSizeEventually(3, hz3);
    assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz3));
    assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz4));
}

// Sends hz3 a member list that does not contain hz3 itself; the receiving
// side is expected to reject it via an assertion (continues on next lines).
@Test
@RequireAssertEnabled
public void memberReceives_memberUpdateNotContainingItself() throws Exception {
    Config config = new Config();
    config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
    HazelcastInstance hz1 = factory.newHazelcastInstance(config);
    HazelcastInstance hz2 = factory.newHazelcastInstance(config);
    HazelcastInstance hz3 = factory.newHazelcastInstance(config);
    Node node = getNode(hz1);
    ClusterServiceImpl clusterService = node.getClusterService();
    MembershipManager membershipManager =
clusterService.getMembershipManager(); MembersView membersView = MembersView.createNew(membershipManager.getMemberListVersion() + 1, asList(membershipManager.getMember(getAddress(hz1)), membershipManager.getMember(getAddress(hz2)))); Operation memberUpdate = new MembersUpdateOp(membershipManager.getMember(getAddress(hz3)).getUuid(), membersView, clusterService.getClusterTime(), null, true); memberUpdate.setCallerUuid(node.getThisUuid()); Future<Object> future = node.getNodeEngine().getOperationService().invokeOnTarget(null, memberUpdate, getAddress(hz3)); try { future.get(); fail("Membership update should fail!"); } catch (AssertionError error) { // AssertionError expected (requires assertions enabled) } } @Test public void memberReceives_memberUpdateFromInvalidMaster() throws Exception { Config config = new Config(); config.setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE)); HazelcastInstance hz1 = factory.newHazelcastInstance(config); HazelcastInstance hz2 = factory.newHazelcastInstance(config); HazelcastInstance hz3 = factory.newHazelcastInstance(config); Node node = getNode(hz1); ClusterServiceImpl clusterService = node.getClusterService(); MembershipManager membershipManager = clusterService.getMembershipManager(); MemberInfo newMemberInfo = new MemberInfo(new Address("127.0.0.1", 6000), newUnsecureUuidString(), Collections.<String, Object>emptyMap(), node.getVersion()); MembersView membersView = MembersView.cloneAdding(membershipManager.getMembersView(), singleton(newMemberInfo)); Operation memberUpdate = new MembersUpdateOp(membershipManager.getMember(getAddress(hz3)).getUuid(), membersView, clusterService.getClusterTime(), null, true); NodeEngineImpl nonMasterNodeEngine = getNodeEngineImpl(hz2); memberUpdate.setCallerUuid(nonMasterNodeEngine.getNode().getThisUuid()); Future<Object> future = nonMasterNodeEngine.getOperationService().invokeOnTarget(null, memberUpdate, getAddress(hz3)); future.get(); // member update 
should not be applied assertClusterSize(3, hz1, hz2, hz3); assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz2)); assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz3)); } @Test public void memberListOrder_shouldBeSame_whenMemberRestartedWithSameIdentity() { Config config = new Config(); config.setProperty(GroupProperty.MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5"); config.setProperty(GroupProperty.MAX_JOIN_SECONDS.getName(), "5"); final HazelcastInstance hz1 = factory.newHazelcastInstance(config); final HazelcastInstance hz2 = factory.newHazelcastInstance(config); HazelcastInstance hz3 = factory.newHazelcastInstance(config); HazelcastInstance hz4 = factory.newHazelcastInstance(config); assertClusterSize(4, hz2, hz3); dropOperationsBetween(hz1, hz2, F_ID, singletonList(MEMBER_INFO_UPDATE)); final MemberImpl member3 = getNode(hz3).getLocalMember(); hz3.getLifecycleService().terminate(); assertClusterSizeEventually(3, hz1, hz4); assertClusterSize(4, hz2); hz3 = newHazelcastInstance(config, "test-instance", new StaticMemberNodeContext(factory, member3)); assertClusterSizeEventually(4, hz1, hz4); resetPacketFiltersFrom(hz1); assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz3)); assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz4)); assertTrueEventually(new AssertTask() { @Override public void run() throws Exception { assertMemberViewsAreSame(getMemberMap(hz1), getMemberMap(hz2)); } }); } @Test public void shouldNotProcessStaleJoinRequest() { HazelcastInstance hz1 = factory.newHazelcastInstance(); HazelcastInstance hz2 = factory.newHazelcastInstance(); JoinRequest staleJoinReq = getNode(hz2).createJoinRequest(true); hz2.shutdown(); assertClusterSizeEventually(1, hz1); ClusterServiceImpl clusterService = (ClusterServiceImpl) getClusterService(hz1); clusterService.getClusterJoinManager().handleJoinRequest(staleJoinReq, null); assertClusterSize(1, hz1); } // On a joining member assert that no operations are executed before pre 
join operations execution is completed. @Test public void noOperationExecuted_beforePreJoinOpIsDone() { CountDownLatch latch = new CountDownLatch(1); PreJoinAwareServiceImpl service = new PreJoinAwareServiceImpl(latch); final Config config = getConfigWithService(service, PreJoinAwareServiceImpl.SERVICE_NAME); HazelcastInstance instance1 = factory.newHazelcastInstance(config); final Address instance2Address = factory.nextAddress(); final OperationService operationService = getNode(instance1).getNodeEngine().getOperationService(); // send operations from master to joining member. The master has already added the joining member to its member list // while the FinalizeJoinOp is being executed on joining member, so it might send operations to the joining member. Future sendOpsFromMaster = spawn(new Runnable() { @Override public void run() { while (true) { try { ExecutionTrackerOp op = new ExecutionTrackerOp(); operationService.send(op, instance2Address); } catch (Exception e) { e.printStackTrace(); } if (currentThread().isInterrupted()) { break; } LockSupport.parkNanos(1); } } }); final AtomicReference<HazelcastInstance> instanceReference = new AtomicReference<HazelcastInstance>(null); spawn(new Runnable() { @Override public void run() { instanceReference.set(factory.newHazelcastInstance(instance2Address, config)); } }); sleepSeconds(10); // on latch countdown, the pre-join op completes latch.countDown(); sleepSeconds(5); sendOpsFromMaster.cancel(true); assertFalse(service.otherOpExecutedBeforePreJoin.get()); } @Test public void joiningMemberShouldShutdown_whenExceptionDeserializingPreJoinOp() { Config config = getConfigWithService(new FailingPreJoinOpService(), FailingPreJoinOpService.SERVICE_NAME); HazelcastInstance hz1 = factory.newHazelcastInstance(config); // joining member fails while deserializing pre-join op and should shutdown try { factory.newHazelcastInstance(config); fail("Second HazelcastInstance should not have started"); } catch (IllegalStateException e) 
{ // expected } assertClusterSize(1, hz1); } @Test public void joiningMemberShouldShutdown_whenExceptionDeserializingPostJoinOp() { Config config = getConfigWithService(new FailingPostJoinOpService(), FailingPostJoinOpService.SERVICE_NAME); HazelcastInstance hz1 = factory.newHazelcastInstance(config); // joining member fails while deserializing post-join op and should shutdown try { factory.newHazelcastInstance(config); fail("Second HazelcastInstance should not have started"); } catch (IllegalStateException e) { // expected } assertClusterSize(1, hz1); } private Config getConfigWithService(Object service, String serviceName) { final Config config = new Config(); ServiceConfig serviceConfig = new ServiceConfig().setEnabled(true) .setName(serviceName).setImplementation(service); config.getServicesConfig().addServiceConfig(serviceConfig); return config; } static void assertMemberViewsAreSame(MemberMap expectedMemberMap, MemberMap actualMemberMap) { assertEquals(expectedMemberMap.getVersion(), actualMemberMap.getVersion()); assertEquals(expectedMemberMap.size(), actualMemberMap.size()); // order is important List<MemberImpl> expectedMembers = new ArrayList<MemberImpl>(expectedMemberMap.getMembers()); List<MemberImpl> actualMembers = new ArrayList<MemberImpl>(actualMemberMap.getMembers()); assertEquals(expectedMembers, actualMembers); } static MemberMap getMemberMap(HazelcastInstance instance) { ClusterServiceImpl clusterService = getNode(instance).getClusterService(); return clusterService.getMembershipManager().getMemberMap(); } public static class StaticMemberNodeContext implements NodeContext { final NodeContext delegate; final MemberImpl member; public StaticMemberNodeContext(TestHazelcastInstanceFactory factory, MemberImpl member) { this.member = member; delegate = factory.getRegistry().createNodeContext(member.getAddress()); } @Override public NodeExtension createNodeExtension(Node node) { return new DefaultNodeExtension(node) { @Override public String 
createMemberUuid(Address address) {
    // Reuse the captured member's UUID instead of generating a new one.
    return member.getUuid();
}
};
}

@Override
public AddressPicker createAddressPicker(Node node) {
    return delegate.createAddressPicker(node);
}

@Override
public Joiner createJoiner(Node node) {
    return delegate.createJoiner(node);
}

@Override
public ConnectionManager createConnectionManager(Node node, ServerSocketChannel serverSocketChannel) {
    return delegate.createConnectionManager(node, serverSocketChannel);
}
}

// Service whose post-join operation blocks until the test's latch is counted down.
private static class PostJoinAwareServiceImpl implements PostJoinAwareService {
    static final String SERVICE_NAME = "post-join-service";

    final CountDownLatch latch;

    private PostJoinAwareServiceImpl(CountDownLatch latch) {
        this.latch = latch;
    }

    @Override
    public Operation getPostJoinOperation() {
        return new TimeConsumingPostJoinOperation();
    }
}

// Post-join operation that waits on the owning service's latch.
private static class TimeConsumingPostJoinOperation extends Operation {
    @Override
    public void run() throws Exception {
        PostJoinAwareServiceImpl service = getService();
        service.latch.await();
    }

    @Override
    public String getServiceName() {
        return PostJoinAwareServiceImpl.SERVICE_NAME;
    }
}

// Service whose pre-join operation blocks until released; records whether any
// other operation managed to run before the pre-join op completed.
private static class PreJoinAwareServiceImpl implements PreJoinAwareService {
    static final String SERVICE_NAME = "pre-join-service";

    final CountDownLatch latch;
    // set to true once the pre-join operation has finished
    final AtomicBoolean preJoinOpExecutionCompleted = new AtomicBoolean();
    // set to true if another operation ran before the pre-join op finished
    final AtomicBoolean otherOpExecutedBeforePreJoin = new AtomicBoolean();

    private PreJoinAwareServiceImpl(CountDownLatch latch) {
        this.latch = latch;
    }

    @Override
    public Operation getPreJoinOperation() {
        return new TimeConsumingPreJoinOperation();
    }
}

// Pre-join operation that waits on the latch, then marks completion.
private static class TimeConsumingPreJoinOperation extends Operation {
    @Override
    public void run() throws Exception {
        PreJoinAwareServiceImpl service = getService();
        service.latch.await();
        service.preJoinOpExecutionCompleted.set(true);
    }

    @Override
    public String getServiceName() {
        return PreJoinAwareServiceImpl.SERVICE_NAME;
    }
}

// Operation used to detect execution ordering relative to the pre-join op.
private static class ExecutionTrackerOp extends Operation {

    @Override
    public void run() throws Exception {
PreJoinAwareServiceImpl preJoinAwareService = getService();
// Record a violation if this op runs before the pre-join op completed.
if (!preJoinAwareService.preJoinOpExecutionCompleted.get()) {
    preJoinAwareService.otherOpExecutedBeforePreJoin.set(true);
}
}

@Override
public boolean returnsResponse() {
    return false;
}

@Override
public String getServiceName() {
    return PreJoinAwareServiceImpl.SERVICE_NAME;
}
}

// Service whose pre-join operation always fails to deserialize.
private static class FailingPreJoinOpService implements PreJoinAwareService {
    static final String SERVICE_NAME = "failing-pre-join-service";

    @Override
    public Operation getPreJoinOperation() {
        return new FailsDeserializationOperation();
    }
}

// Service whose post-join operation always fails to deserialize.
private static class FailingPostJoinOpService implements PostJoinAwareService {
    static final String SERVICE_NAME = "failing-post-join-service";

    @Override
    public Operation getPostJoinOperation() {
        return new FailsDeserializationOperation();
    }
}

// Operation that throws during readInternal, simulating a deserialization failure.
public static class FailsDeserializationOperation extends Operation {
    @Override
    public void run() throws Exception {
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        throw new RuntimeException("This operation always fails during deserialization");
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.physical.rowSet.impl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.Arrays; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.physical.rowSet.ResultSetLoader; import org.apache.drill.exec.physical.rowSet.RowSetLoader; import org.apache.drill.exec.physical.rowSet.impl.ResultSetLoaderImpl.ResultSetOptions; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.record.TupleMetadata; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.vector.accessor.ScalarElementReader; import org.apache.drill.exec.vector.accessor.ScalarWriter; import org.apache.drill.test.SubOperatorTest; import org.apache.drill.test.rowSet.RowSet; import org.apache.drill.test.rowSet.RowSetReader; import org.apache.drill.test.rowSet.SchemaBuilder; import org.junit.Test; import com.google.common.base.Charsets; /** * Exercise the vector overflow functionality for the result set loader. 
*/ public class TestResultSetLoaderOverflow extends SubOperatorTest { /** * Test that the writer detects a vector overflow. The offending column * value should be moved to the next batch. */ @Test public void testVectorSizeLimit() { TupleMetadata schema = new SchemaBuilder() .add("s", MinorType.VARCHAR) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); rsLoader.startBatch(); byte value[] = new byte[512]; Arrays.fill(value, (byte) 'X'); int count = 0; while (! rootWriter.isFull()) { rootWriter.start(); rootWriter.scalar(0).setBytes(value, value.length); rootWriter.save(); count++; } // Number of rows should be driven by vector size. // Our row count should include the overflow row int expectedCount = ValueVector.MAX_BUFFER_SIZE / value.length; assertEquals(expectedCount + 1, count); // Loader's row count should include only "visible" rows assertEquals(expectedCount, rootWriter.rowCount()); // Total count should include invisible and look-ahead rows. assertEquals(expectedCount + 1, rsLoader.totalRowCount()); // Result should exclude the overflow row RowSet result = fixture.wrap(rsLoader.harvest()); assertEquals(expectedCount, result.rowCount()); result.clear(); // Next batch should start with the overflow row rsLoader.startBatch(); assertEquals(1, rootWriter.rowCount()); assertEquals(expectedCount + 1, rsLoader.totalRowCount()); result = fixture.wrap(rsLoader.harvest()); assertEquals(1, result.rowCount()); result.clear(); rsLoader.close(); } /** * Test that the writer detects a vector overflow. The offending column * value should be moved to the next batch. 
*/ @Test public void testBatchSizeLimit() { TupleMetadata schema = new SchemaBuilder() .add("s", MinorType.VARCHAR) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .setBatchSizeLimit( 8 * 1024 * 1024 + // Data 2 * ValueVector.MAX_ROW_COUNT * 4) // Offsets, doubled because of +1 .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); rsLoader.startBatch(); byte value[] = new byte[512]; Arrays.fill(value, (byte) 'X'); int count = 0; while (! rootWriter.isFull()) { rootWriter.start(); rootWriter.scalar(0).setBytes(value, value.length); rootWriter.save(); count++; } // Our row count should include the overflow row int expectedCount = 8 * 1024 * 1024 / value.length; assertEquals(expectedCount + 1, count); // Loader's row count should include only "visible" rows assertEquals(expectedCount, rootWriter.rowCount()); // Total count should include invisible and look-ahead rows. assertEquals(expectedCount + 1, rsLoader.totalRowCount()); // Result should exclude the overflow row RowSet result = fixture.wrap(rsLoader.harvest()); assertEquals(expectedCount, result.rowCount()); result.clear(); // Next batch should start with the overflow row rsLoader.startBatch(); assertEquals(1, rootWriter.rowCount()); assertEquals(expectedCount + 1, rsLoader.totalRowCount()); result = fixture.wrap(rsLoader.harvest()); assertEquals(1, result.rowCount()); result.clear(); rsLoader.close(); } /** * Load a batch to overflow. Then, close the loader with the overflow * batch unharvested. The Loader should release the memory allocated * to the unused overflow vectors. 
*/ @Test public void testCloseWithOverflow() { TupleMetadata schema = new SchemaBuilder() .add("s", MinorType.VARCHAR) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); rsLoader.startBatch(); byte value[] = new byte[512]; Arrays.fill(value, (byte) 'X'); int count = 0; while (! rootWriter.isFull()) { rootWriter.start(); rootWriter.scalar(0).setBytes(value, value.length); rootWriter.save(); count++; } assertTrue(count < ValueVector.MAX_ROW_COUNT); // Harvest the full batch RowSet result = fixture.wrap(rsLoader.harvest()); result.clear(); // Close without harvesting the overflow batch. rsLoader.close(); } /** * Case where a single array fills up the vector to the maximum size * limit. Overflow won't work here; the attempt will fail with a user * exception. */ @Test public void testOversizeArray() { TupleMetadata schema = new SchemaBuilder() .addArray("s", MinorType.VARCHAR) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); // Create a single array as the column value in the first row. When // this overflows, an exception is thrown since overflow is not possible. rsLoader.startBatch(); byte value[] = new byte[473]; Arrays.fill(value, (byte) 'X'); rootWriter.start(); ScalarWriter array = rootWriter.array(0).scalar(); try { for (int i = 0; i < ValueVector.MAX_ROW_COUNT; i++) { array.setBytes(value, value.length); } fail(); } catch (UserException e) { assertTrue(e.getMessage().contains("column value is larger than the maximum")); } rsLoader.close(); } /** * Test a row with a single array column which overflows. 
Verifies
 * that all the fiddly bits about offset vectors and so on works
 * correctly. Run this test (the simplest case) if you change anything
 * about the array handling code.
 */
@Test
public void testSizeLimitOnArray() {
    TupleMetadata schema = new SchemaBuilder()
        .addArray("s", MinorType.VARCHAR)
        .buildSchema();
    ResultSetOptions options = new OptionBuilder()
        .setRowCountLimit(ValueVector.MAX_ROW_COUNT)
        .setSchema(schema)
        .build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    RowSetLoader rootWriter = rsLoader.writer();

    // Fill batch with rows of with a single array, three values each. Tack on
    // a suffix to each so we can be sure the proper data is written and moved
    // to the overflow batch.
    rsLoader.startBatch();
    byte value[] = new byte[473];
    Arrays.fill(value, (byte) 'X');
    String strValue = new String(value, Charsets.UTF_8);
    int count = 0;
    int rowSize = 0;
    int totalSize = 0;
    int valuesPerArray = 13;
    while (rootWriter.start()) {
        totalSize += rowSize;
        rowSize = 0;
        ScalarWriter array = rootWriter.array(0).scalar();
        for (int i = 0; i < valuesPerArray; i++) {
            String cellValue = strValue + (count + 1) + "." + i;
            array.setString(cellValue);
            rowSize += cellValue.length();
        }
        rootWriter.save();
        count++;
    }

    // Row count should include the overflow row.
    int expectedCount = count - 1;

    // Size without overflow row should fit in the vector, size
    // with overflow should not.
    assertTrue(totalSize <= ValueVector.MAX_BUFFER_SIZE);
    assertTrue(totalSize + rowSize > ValueVector.MAX_BUFFER_SIZE);

    // Result should exclude the overflow row. Last row
    // should hold the last full array.
    RowSet result = fixture.wrap(rsLoader.harvest());
    assertEquals(expectedCount, result.rowCount());
    RowSetReader reader = result.reader();
    reader.set(expectedCount - 1);
    ScalarElementReader arrayReader = reader.column(0).elements();
    assertEquals(valuesPerArray, arrayReader.size());
    for (int i = 0; i < valuesPerArray; i++) {
        String cellValue = strValue + (count - 1) + "."
+ i; assertEquals(cellValue, arrayReader.getString(i)); } result.clear(); // Next batch should start with the overflow row. // The only row in this next batch should be the whole // array being written at the time of overflow. rsLoader.startBatch(); // VectorPrinter.printStrings((VarCharVector) ((VarCharColumnWriter) rootWriter.array(0).scalar()).vector(), 0, 5); // ((ResultSetLoaderImpl) rsLoader).dump(new HierarchicalPrinter()); assertEquals(1, rootWriter.rowCount()); assertEquals(expectedCount + 1, rsLoader.totalRowCount()); result = fixture.wrap(rsLoader.harvest()); // VectorPrinter.printStrings((VarCharVector) ((VarCharColumnWriter) rootWriter.array(0).scalar()).vector(), 0, 5); assertEquals(1, result.rowCount()); reader = result.reader(); reader.next(); arrayReader = reader.column(0).elements(); assertEquals(valuesPerArray, arrayReader.size()); for (int i = 0; i < valuesPerArray; i++) { String cellValue = strValue + (count) + "." + i; assertEquals(cellValue, arrayReader.getString(i)); } result.clear(); rsLoader.close(); } /** * Test the complete set of array overflow cases: * <ul> * <li>Array a is written before the column that has overflow, * and must be copied, in its entirety, to the overflow row.</li> * <li>Column b causes the overflow.</li> * <li>Column c is written after the overflow, and should go * to the look-ahead row.</li> * <li>Column d is written for a while, then has empties before * the overflow row, but is written in the overflow row.<li> * <li>Column e is like d, but is not written in the overflow * row.</li> */ @Test public void testArrayOverflowWithOtherArrays() { TupleMetadata schema = new SchemaBuilder() .addArray("a", MinorType.INT) .addArray("b", MinorType.VARCHAR) .addArray("c", MinorType.INT) .addArray("d", MinorType.INT) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), 
options);
    RowSetLoader rootWriter = rsLoader.writer();

    // Fill batch with rows of with a single array, three values each. Tack on
    // a suffix to each so we can be sure the proper data is written and moved
    // to the overflow batch.
    byte value[] = new byte[512];
    Arrays.fill(value, (byte) 'X');
    String strValue = new String(value, Charsets.UTF_8);
    int aCount = 3;
    int bCount = 11;
    int cCount = 5;
    int dCount = 7;
    // Column c stops being written halfway to the expected overflow point.
    int cCutoff = ValueVector.MAX_BUFFER_SIZE / value.length / bCount / 2;
    ScalarWriter aWriter = rootWriter.array("a").scalar();
    ScalarWriter bWriter = rootWriter.array("b").scalar();
    ScalarWriter cWriter = rootWriter.array("c").scalar();
    ScalarWriter dWriter = rootWriter.array("d").scalar();
    int count = 0;
    rsLoader.startBatch();
    while (rootWriter.start()) {
        for (int i = 0; i < aCount; i++) {
            aWriter.setInt(count * aCount + i);
        }
        for (int i = 0; i < bCount; i++) {
            String cellValue = strValue + (count * bCount + i);
            bWriter.setString(cellValue);
        }
        if (count < cCutoff) {
            for (int i = 0; i < cCount; i++) {
                cWriter.setInt(count * cCount + i);
            }
        }

        // Relies on fact that isFull becomes true right after
        // a vector overflows; don't have to wait for saveRow().
if (count < cCutoff || rootWriter.isFull()) { for (int i = 0; i < dCount; i++) { dWriter.setInt(count * dCount + i); } } rootWriter.save(); count++; } // Verify RowSet result = fixture.wrap(rsLoader.harvest()); assertEquals(count - 1, result.rowCount()); RowSetReader reader = result.reader(); ScalarElementReader aReader = reader.array("a").elements(); ScalarElementReader bReader = reader.array("b").elements(); ScalarElementReader cReader = reader.array("c").elements(); ScalarElementReader dReader = reader.array("d").elements(); while (reader.next()) { int rowId = reader.rowIndex(); assertEquals(aCount, aReader.size()); for (int i = 0; i < aCount; i++) { assertEquals(rowId * aCount + i, aReader.getInt(i)); } assertEquals(bCount, bReader.size()); for (int i = 0; i < bCount; i++) { String cellValue = strValue + (rowId * bCount + i); assertEquals(cellValue, bReader.getString(i)); } if (rowId < cCutoff) { assertEquals(cCount, cReader.size()); for (int i = 0; i < cCount; i++) { assertEquals(rowId * cCount + i, cReader.getInt(i)); } assertEquals(dCount, dReader.size()); for (int i = 0; i < dCount; i++) { assertEquals(rowId * dCount + i, dReader.getInt(i)); } } else { assertEquals(0, cReader.size()); assertEquals(0, dReader.size()); } } result.clear(); int firstCount = count - 1; // One row is in the batch. Write more, skipping over the // initial few values for columns c and d. Column d has a // roll-over value, c has an empty roll-over. 
rsLoader.startBatch();
    for (int j = 0; j < 5; j++) {
        rootWriter.start();
        for (int i = 0; i < aCount; i++) {
            aWriter.setInt(count * aCount + i);
        }
        for (int i = 0; i < bCount; i++) {
            String cellValue = strValue + (count * bCount + i);
            bWriter.setString(cellValue);
        }
        // c and d are only written for the last iteration (j > 3).
        if (j > 3) {
            for (int i = 0; i < cCount; i++) {
                cWriter.setInt(count * cCount + i);
            }
            for (int i = 0; i < dCount; i++) {
                dWriter.setInt(count * dCount + i);
            }
        }
        rootWriter.save();
        count++;
    }
    // 5 new rows plus the rolled-over overflow row.
    result = fixture.wrap(rsLoader.harvest());
    assertEquals(6, result.rowCount());
    reader = result.reader();
    aReader = reader.array("a").elements();
    bReader = reader.array("b").elements();
    cReader = reader.array("c").elements();
    dReader = reader.array("d").elements();
    int j = 0;
    while (reader.next()) {
        int rowId = firstCount + reader.rowIndex();
        assertEquals(aCount, aReader.size());
        for (int i = 0; i < aCount; i++) {
            assertEquals("Index " + i, rowId * aCount + i, aReader.getInt(i));
        }
        assertEquals(bCount, bReader.size());
        for (int i = 0; i < bCount; i++) {
            String cellValue = strValue + (rowId * bCount + i);
            assertEquals(cellValue, bReader.getString(i));
        }
        if (j > 4) {
            assertEquals(cCount, cReader.size());
            for (int i = 0; i < cCount; i++) {
                assertEquals(rowId * cCount + i, cReader.getInt(i));
            }
        } else {
            assertEquals(0, cReader.size());
        }
        // Row 0 is the overflow row carrying d's rolled-over values.
        if (j == 0 || j > 4) {
            assertEquals(dCount, dReader.size());
            for (int i = 0; i < dCount; i++) {
                assertEquals(rowId * dCount + i, dReader.getInt(i));
            }
        } else {
            assertEquals(0, dReader.size());
        }
        j++;
    }
    result.clear();
    rsLoader.close();
}

/**
 * Create an array that contains more than 64K values. Drill has no numeric
 * limit on array lengths. (Well, it does, but the limit is about 2 billion
 * which, even for bytes, is too large to fit into a vector...)
*/ @Test public void testLargeArray() { ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator()); RowSetLoader rootWriter = rsLoader.writer(); MaterializedField field = SchemaBuilder.columnSchema("a", MinorType.INT, DataMode.REPEATED); rootWriter.addColumn(field); // Create a single array as the column value in the first row. When // this overflows, an exception is thrown since overflow is not possible. rsLoader.startBatch(); rootWriter.start(); ScalarWriter array = rootWriter.array(0).scalar(); try { for (int i = 0; i < Integer.MAX_VALUE; i++) { array.setInt(i+1); } fail(); } catch (UserException e) { // Expected } rsLoader.close(); } /** * Test the case that an array has "missing values" before the overflow. */ @Test public void testMissingArrayValues() { TupleMetadata schema = new SchemaBuilder() .add("a", MinorType.INT) .add("b", MinorType.VARCHAR) .addArray("c", MinorType.INT) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); byte value[] = new byte[512]; Arrays.fill(value, (byte) 'X'); int blankAfter = ValueVector.MAX_BUFFER_SIZE / 512 * 2 / 3; ScalarWriter cWriter = rootWriter.array("c").scalar(); rsLoader.startBatch(); int rowId = 0; while (rootWriter.start()) { rootWriter.scalar("a").setInt(rowId); rootWriter.scalar("b").setBytes(value, value.length); if (rowId < blankAfter) { for (int i = 0; i < 3; i++) { cWriter.setInt(rowId * 3 + i); } } rootWriter.save(); rowId++; } RowSet result = fixture.wrap(rsLoader.harvest()); assertEquals(rowId - 1, result.rowCount()); RowSetReader reader = result.reader(); ScalarElementReader cReader = reader.array("c").elements(); while (reader.next()) { assertEquals(reader.rowIndex(), reader.scalar("a").getInt()); assertTrue(Arrays.equals(value, reader.scalar("b").getBytes())); if (reader.rowIndex() < 
blankAfter) { assertEquals(3, cReader.size()); for (int i = 0; i < 3; i++) { assertEquals(reader.rowIndex() * 3 + i, cReader.getInt(i)); } } else { assertEquals(0, cReader.size()); } } result.clear(); rsLoader.close(); } @Test public void testOverflowWithNullables() { TupleMetadata schema = new SchemaBuilder() .add("n", MinorType.INT) .addNullable("a", MinorType.VARCHAR) .addNullable("b", MinorType.VARCHAR) .addNullable("c", MinorType.VARCHAR) .buildSchema(); ResultSetOptions options = new OptionBuilder() .setRowCountLimit(ValueVector.MAX_ROW_COUNT) .setSchema(schema) .build(); ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options); RowSetLoader rootWriter = rsLoader.writer(); rsLoader.startBatch(); byte value[] = new byte[512]; Arrays.fill(value, (byte) 'X'); int count = 0; while (! rootWriter.isFull()) { rootWriter.start(); rootWriter.scalar(0).setInt(count); rootWriter.scalar(1).setNull(); rootWriter.scalar(2).setBytes(value, value.length); rootWriter.scalar(3).setNull(); rootWriter.save(); count++; } // Result should exclude the overflow row RowSet result = fixture.wrap(rsLoader.harvest()); assertEquals(count - 1, result.rowCount()); RowSetReader reader = result.reader(); while (reader.next()) { assertEquals(reader.rowIndex(), reader.scalar(0).getInt()); assertTrue(reader.scalar(1).isNull()); assertTrue(Arrays.equals(value, reader.scalar(2).getBytes())); assertTrue(reader.scalar(3).isNull()); } result.clear(); // Next batch should start with the overflow row rsLoader.startBatch(); result = fixture.wrap(rsLoader.harvest()); reader = result.reader(); assertEquals(1, result.rowCount()); assertTrue(reader.next()); assertEquals(count - 1, reader.scalar(0).getInt()); assertTrue(reader.scalar(1).isNull()); assertTrue(Arrays.equals(value, reader.scalar(2).getBytes())); assertTrue(reader.scalar(3).isNull()); result.clear(); rsLoader.close(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.html.form;

import java.util.List;

import org.apache.wicket.RequestContext;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.model.IModel;
import org.apache.wicket.protocol.http.portlet.PortletRequestContext;

/**
 * A choice implemented as a dropdown menu/list.
 * <p>
 * Java:
 *
 * <pre>
 * List SITES = Arrays.asList(new String[] { &quot;The Server Side&quot;, &quot;Java Lobby&quot;, &quot;Java.Net&quot; });
 *
 * // Add a dropdown choice component that uses Input's 'site' property to designate the
 * // current selection, and that uses the SITES list for the available options.
 * // Note that when the selection is null, Wicket will lookup a localized string to
 * // represent this null with key: &quot;id + '.null'&quot;. In this case, this is 'site.null'
 * // which can be found in DropDownChoicePage.properties
 * form.add(new DropDownChoice(&quot;site&quot;, SITES));
 * </pre>
 *
 * HTML:
 *
 * <pre>
 * &lt;select wicket:id=&quot;site&quot;&gt;
 *     &lt;option&gt;site 1&lt;/option&gt;
 *     &lt;option&gt;site 2&lt;/option&gt;
 * &lt;/select&gt;
 * </pre>
 *
 * </p>
 *
 * <p>
 * You can extend this class and override method wantOnSelectionChangedNotifications() to force
 * server roundtrips on each selection change.
 * </p>
 *
 * @author Jonathan Locke
 * @author Eelco Hillenius
 * @author Johan Compagner
 *
 * @param <T>
 *            The model object type
 */
public class DropDownChoice<T> extends AbstractSingleSelectChoice<T> implements IOnChangeListener
{
	private static final long serialVersionUID = 1L;

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String)
	 */
	public DropDownChoice(final String id)
	{
		super(id);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, List)
	 */
	public DropDownChoice(final String id, final List<? extends T> choices)
	{
		super(id, choices);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String,
	 *      List,IChoiceRenderer)
	 */
	public DropDownChoice(final String id, final List<? extends T> data,
		final IChoiceRenderer<? super T> renderer)
	{
		super(id, data, renderer);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, IModel, List)
	 */
	public DropDownChoice(final String id, IModel<T> model, final List<? extends T> choices)
	{
		super(id, model, choices);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, IModel, List,
	 *      IChoiceRenderer)
	 */
	public DropDownChoice(final String id, IModel<T> model, final List<? extends T> data,
		final IChoiceRenderer<? super T> renderer)
	{
		super(id, model, data, renderer);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, IModel)
	 */
	public DropDownChoice(String id, IModel<? extends List<? extends T>> choices)
	{
		super(id, choices);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, IModel,IModel)
	 */
	public DropDownChoice(String id, IModel<T> model, IModel<? extends List<? extends T>> choices)
	{
		super(id, model, choices);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String,
	 *      IModel,IChoiceRenderer)
	 */
	public DropDownChoice(String id, IModel<? extends List<? extends T>> choices,
		IChoiceRenderer<? super T> renderer)
	{
		super(id, choices, renderer);
	}

	/**
	 * @see org.apache.wicket.markup.html.form.AbstractChoice#AbstractChoice(String, IModel,
	 *      IModel,IChoiceRenderer)
	 */
	public DropDownChoice(String id, IModel<T> model, IModel<? extends List<? extends T>> choices,
		IChoiceRenderer<? super T> renderer)
	{
		super(id, model, choices, renderer);
	}

	/**
	 * Called when a selection changes. Converts and pushes the submitted value into the model,
	 * then notifies subclasses via {@link #onSelectionChanged(Object)}.
	 */
	public final void onSelectionChanged()
	{
		// convert the raw request value, write it to the model, then notify subclasses
		convertInput();
		updateModel();
		onSelectionChanged(getModelObject());
	}

	/**
	 * Processes the component tag. Ensures the tag is a &lt;select&gt; and, when roundtrip
	 * notifications are wanted, attaches an "onchange" handler that posts back to this
	 * component's {@link IOnChangeListener} callback.
	 *
	 * @param tag
	 *            Tag to modify
	 * @see org.apache.wicket.Component#onComponentTag(org.apache.wicket.markup.ComponentTag)
	 */
	@Override
	protected void onComponentTag(final ComponentTag tag)
	{
		checkComponentTag(tag, "select");

		// Should a roundtrip be made (have onSelectionChanged called) when the
		// selection changed?
		if (wantOnSelectionChangedNotifications())
		{
			// url that points to this components IOnChangeListener method
			CharSequence url = urlFor(IOnChangeListener.INTERFACE);

			Form<?> form = findParent(Form.class);
			if (form != null)
			{
				RequestContext rc = RequestContext.get();
				if (rc.isPortletRequest())
				{
					// restore url back to real wicket path as its going to be interpreted by the
					// form itself
					url = ((PortletRequestContext)rc).getLastEncodedPath();
				}
				// inside a form: submit the form through the generated javascript
				tag.put("onchange", form.getJsForInterfaceUrl(url));
			}
			else
			{
				// TODO: following doesn't work with portlets, should be posted to a dynamic hidden
				// form
				// with an ActionURL or something
				// no enclosing form: navigate directly, appending the selected value as a
				// request parameter ("&amp;" is used because the attribute is XML-escaped output)
				tag.put("onchange", "window.location.href='" + url +
					(url.toString().indexOf('?') > -1 ? "&amp;" : "?") + getInputName() +
					"=' + this.options[this.selectedIndex].value;");
			}
		}

		super.onComponentTag(tag);
	}

	/**
	 * Template method that can be overridden by clients that implement IOnChangeListener to be
	 * notified by onChange events of a select element. This method does nothing by default.
	 * <p>
	 * Called when a option is selected of a dropdown list that wants to be notified of this event.
	 * This method is to be implemented by clients that want to be notified of selection events.
	 *
	 * @param newSelection
	 *            The newly selected object of the backing model NOTE this is the same as you would
	 *            get by calling getModelObject() if the new selection were current
	 */
	protected void onSelectionChanged(final T newSelection)
	{
	}

	/**
	 * Whether this component's onSelectionChanged event handler should be called using javascript
	 * <tt>window.location</tt> if the selection changes. If true, a roundtrip will be generated
	 * with each selection change, resulting in the model being updated (of just this component)
	 * and onSelectionChanged being called. This method returns false by default. If you wish to
	 * use Ajax instead, let {@link #wantOnSelectionChangedNotifications()} return false and add an
	 * {@link AjaxFormComponentUpdatingBehavior} to the component using the <tt>onchange</tt>
	 * event.
	 *
	 * @return True if this component's onSelectionChanged event handler should called using
	 *         javascript if the selection changes
	 */
	protected boolean wantOnSelectionChangedNotifications()
	{
		return false;
	}

	/**
	 * @see org.apache.wicket.MarkupContainer#getStatelessHint()
	 */
	@Override
	protected boolean getStatelessHint()
	{
		// a component that needs server-side selection-change callbacks cannot be stateless
		if (wantOnSelectionChangedNotifications())
		{
			return false;
		}
		return super.getStatelessHint();
	}
}
/* * Copyright 2002-2017 the original author or authors. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with * the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the * specific language governing permissions and limitations under the License. */ package com.phoenixnap.oss.ramlplugin.raml2code.helpers; import java.io.IOException; import java.math.BigDecimal; import java.net.URI; import java.util.Date; import java.util.Iterator; import java.util.Map; import org.apache.commons.io.IOUtils; import org.jsonschema2pojo.Annotator; import org.jsonschema2pojo.DefaultGenerationConfig; import org.jsonschema2pojo.GenerationConfig; import org.jsonschema2pojo.Jackson2Annotator; import org.jsonschema2pojo.SchemaGenerator; import org.jsonschema2pojo.SchemaMapper; import org.jsonschema2pojo.SchemaStore; import org.jsonschema2pojo.rules.RuleFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; import org.springframework.web.multipart.MultipartFile; import com.phoenixnap.oss.ramlplugin.raml2code.data.ApiBodyMetadata; import com.phoenixnap.oss.ramlplugin.raml2code.plugin.Config; import com.phoenixnap.oss.ramlplugin.raml2code.raml.RamlParamType; import com.phoenixnap.oss.ramlplugin.raml2code.raml.RamlRoot; import com.sun.codemodel.JCodeModel; import com.sun.codemodel.JPackage; /** * Class containing convenience methods relating to the extracting of * information from Java types for use as Parameters. 
These can either be * decomposed into RAML Simple Types (Similar to Java primitives) or JSON Schema * for more complex objects * * @author Kurt Paris * @since 0.0.1 * */ public class SchemaHelper { protected static final Logger logger = LoggerFactory.getLogger(SchemaHelper.class); /** * Utility method that will return a schema if the identifier is valid and * exists in the raml file definition. * * @param schema * The name of the schema to resolve * @param document * The Parent Raml Document * @return The full schema if contained in the raml document or null if not * found */ public static String resolveSchema(String schema, RamlRoot document) { if (document == null || schema == null || schema.indexOf("{") != -1) { return null; } if (document.getSchemas() != null && !document.getSchemas().isEmpty()) { for (Map<String, String> map : document.getSchemas()) { if (map.containsKey(schema)) { return map.get(schema); } } } return null; } /** * Maps simple types supported by RAML into primitives and other simple Java * types * * @param param * The Type to map * @param format * Number format specified * @param rawType * RAML type * @return The Java Class which maps to this Simple RAML ParamType or string * if one is not found */ public static Class<?> mapSimpleType(RamlParamType param, String format, String rawType) { switch (param) { case BOOLEAN: return Boolean.class; case DATE: return mapDateFormat(rawType); case INTEGER: { Class<?> fromFormat = mapNumberFromFormat(format); if (fromFormat == Double.class) { throw new IllegalStateException(); } if (fromFormat == null) { return Long.class; // retained for backward compatibility } else { return fromFormat; } } case NUMBER: { Class<?> fromFormat = mapNumberFromFormat(format); if (fromFormat == null) { return BigDecimal.class; // retained for backward // compatibility } else { return fromFormat; } } case FILE: return MultipartFile.class; default: return String.class; } } public static Class<?> mapDateFormat(String rawType) { 
String param = rawType.toUpperCase(); try { switch (param) { case "DATE-ONLY": String dateType = Config.getPojoConfig().getDateType(); if (StringUtils.hasText(dateType)) { return Class.forName(dateType); } break; case "TIME-ONLY": String timeType = Config.getPojoConfig().getTimeType(); if (StringUtils.hasText(timeType)) { return Class.forName(timeType); } break; default: String dateTimeType = Config.getPojoConfig().getDateTimeType(); if (StringUtils.hasText(dateTimeType)) { return Class.forName(dateTimeType); } } } catch (ClassNotFoundException e) { logger.error("Error trying to find class for date type: " + rawType); } return Date.class; } private static Class<?> mapNumberFromFormat(String format) { if (format == null) { return null; } if (format.equals("int64") || format.equals("long")) { return Long.class; } else if (format.equals("int32") || format.equals("int")) { return Integer.class; } else if (format.equals("int16") || format.equals("int8")) { return Short.class; } else if (format.equals("double") || format.equals("float")) { return Double.class; } return null; } /** * Extracts the name from a schema in this order of precedence: 1. If the * schema contains an ID element 2. The name of the schema within the RAML * document 3. 
The autogenerated name based on the enclosing method * * @param schema * The Actual JSON Schema * @param schemaName * The name of the schema within the document * @param fallbackName * any arbitrary name * @return The Name for this Class (POJO) */ public static String extractNameFromSchema(String schema, String schemaName, String fallbackName) { String resolvedName = null; if (schema != null) { // if we have an array type we need to recurse into it int startIdx = 0; String type = extractTopItem("type", schema, startIdx); if (type != null && type.equalsIgnoreCase("array")) { int itemsIdx = schema.indexOf("\"items\""); if (itemsIdx != -1) { startIdx = itemsIdx + 7; } // lets check if we have a ref String ref = extractTopItem("$ref", schema, startIdx); if (ref != null) { logger.info("Loading referenced schema " + ref); ref = ref.replace("classpath:", ""); try { schema = IOUtils.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(ref), "UTF-8"); startIdx = 0; // reset pointer since we recursed into // schema } catch (IOException e) { logger.info("Erro Loading referenced schema " + ref, e); } } } // check if javaType can give us exact name String javaType = extractTopItem("javaType", schema, startIdx); if (StringUtils.hasText(javaType)) { // do stuff to it int dotIdx = javaType.lastIndexOf("."); if (dotIdx > -1) { javaType = javaType.substring(dotIdx + 1); } resolvedName = javaType; } else { String id = extractTopItem("id", schema, startIdx); if (StringUtils.hasText(id)) { // do stuff to it if (id.startsWith("urn:") && ((id.lastIndexOf(":") + 1) < id.length())) { id = id.substring(id.lastIndexOf(":") + 1); } else if (id.startsWith(JSON_SCHEMA_IDENT)) { if (id.length() > (JSON_SCHEMA_IDENT.length() + 3)) { id = id.substring(JSON_SCHEMA_IDENT.length()); } } resolvedName = StringUtils.capitalize(id); } if (!NamingHelper.isValidJavaClassName(resolvedName)) { if (NamingHelper.isValidJavaClassName(schemaName)) { return 
StringUtils.capitalize(schemaName); // try schema // name } else { resolvedName = fallbackName; // fallback to generated } } } } return resolvedName; } /** * Extracts the value of a specified parameter in a schema * * @param searchString * element to search for * @param schema * Schema as a string * @return the value or null if not found */ private static String extractTopItem(String searchString, String schema, int startIdx) { String extracted = null; int propIdx = schema.indexOf("\"properties\"", startIdx); if (propIdx == -1) { propIdx = Integer.MAX_VALUE; } // check for second int idIdx = schema.indexOf("\"" + searchString + "\"", startIdx); int secondIdIdx = schema.indexOf("\"" + searchString + "\"", idIdx + 1); if (secondIdIdx != -1 && propIdx > secondIdIdx) { idIdx = secondIdIdx; } if (idIdx != -1 && propIdx > idIdx) { // make sure we're not in a nested // id // find the 1st and second " after the idx int valueStartIdx = schema.indexOf("\"", idIdx + (searchString.length() + 2)); int valueEndIdx = schema.indexOf("\"", valueStartIdx + 1); extracted = schema.substring(valueStartIdx + 1, valueEndIdx); } return extracted; } private static String JSON_SCHEMA_IDENT = "http://jsonschema.net"; /** * Maps a JSON Schema to a JCodeModel using JSONSchema2Pojo and encapsulates * it along with some metadata into an {@link ApiBodyMetadata} object. * * @param document * The Raml document being parsed * @param schema * The Schema (full schema or schema name to be resolved) * @param basePackage * The base package for the classes we are generating * @param name * The suggested name of the class based on the api call and * whether it's a request/response. 
This will only be used if no * suitable alternative is found in the schema * @param schemaLocation * Base location of this schema, will be used to create absolute * URIs for $ref tags eg "classpath:/" * @return Object representing this Body */ public static ApiBodyMetadata mapSchemaToPojo(RamlRoot document, String schema, String basePackage, String name, String schemaLocation) { String resolvedName = null; String schemaName = schema; // Check if we have the name of a schema or an actual schema String resolvedSchema = SchemaHelper.resolveSchema(schema, document); if (resolvedSchema == null) { resolvedSchema = schema; schemaName = null; } // Extract name from schema resolvedName = extractNameFromSchema(resolvedSchema, schemaName, name); JCodeModel codeModel = buildBodyJCodeModel(basePackage, StringUtils.hasText(schemaLocation) ? schemaLocation : "classpath:/", resolvedName, resolvedSchema, null); if (codeModel != null) { if (codeModel.countArtifacts() == 1) { try { // checking has next twice might be more efficient but this // is more readable, if // we ever run into speed issues here..optimise Iterator<JPackage> packages = codeModel.packages(); // in the case that we have empty packages we need to skip // them to get to the // class JPackage nextPackage = packages.next(); while (!nextPackage.classes().hasNext() && packages.hasNext()) { nextPackage = packages.next(); } resolvedName = nextPackage.classes().next().name(); } catch (NullPointerException npe) { // do nothing, use previous name } } return new ApiBodyMetadata(resolvedName, resolvedSchema, codeModel); } else { return null; } } /** * Builds a JCodeModel for classes that will be used as Request or Response * bodies * * @param basePackage * The package we will be using for the domain objects * @param schemaLocation * The location of this schema, will be used to create absolute * URIs for $ref tags eg "classpath:/" * @param name * The class name * @param schema * The JSON Schema representing this class * @param 
annotator * JsonSchema2Pojo annotator. if null a default annotator will be * used * @return built JCodeModel */ public static JCodeModel buildBodyJCodeModel(String basePackage, String schemaLocation, String name, String schema, Annotator annotator) { JCodeModel codeModel = new JCodeModel(); SchemaStore schemaStore = new SchemaStore(); GenerationConfig config = Config.getPojoConfig(); if (config == null) { config = getDefaultGenerationConfig(); } if (annotator == null) { annotator = new Jackson2Annotator(config); } RuleFactory ruleFactory = new RuleFactory(config, annotator, schemaStore); SchemaMapper mapper = new SchemaMapper(ruleFactory, new SchemaGenerator()); boolean useParent = StringUtils.hasText(schemaLocation); try { if (useParent) { mapper.generate(codeModel, name, basePackage, schema, new URI(schemaLocation)); } else { mapper.generate(codeModel, name, basePackage, schema); } } catch (Exception e) { // TODO make this smarter by checking refs if (useParent && e.getMessage().contains("classpath")) { logger.debug("Referenced Schema contains self $refs or not found in classpath. Regenerating model withouth classpath: for " + name); codeModel = new JCodeModel(); try { mapper.generate(codeModel, name, basePackage, schema); return codeModel; } catch (IOException e1) { // do nothing } } logger.error("Error generating pojo from schema" + name, e); return null; } return codeModel; } /** * Returns a configuration for the JSON Schema 2 POJO that is in line with * the defaults used in the plugin so far * * @return Default Generation Config */ public static GenerationConfig getDefaultGenerationConfig() { return getGenerationConfig(true, false, false, false); } /** * Returns a generation config with the supplied parameters. 
If any of these * parameters are supplied null, it will use the value defined in the * default configuration * * @param generateBuilders * Enables or disables * {@link GenerationConfig#isGenerateBuilders()} * @param includeAdditionalProperties * Enables or disables * {@link GenerationConfig#isIncludeAdditionalProperties()} * @param includeDynamicAccessors * Enables or disables * {@link GenerationConfig#isIncludeDynamicAccessors()} * @param useLongIntegers * Enables or disables * {@link GenerationConfig#isUseLongIntegers()} * @return The GenerationConfig */ public static GenerationConfig getGenerationConfig(Boolean generateBuilders, Boolean includeAdditionalProperties, Boolean includeDynamicAccessors, Boolean useLongIntegers) { return new DefaultGenerationConfig() { @Override public boolean isGenerateBuilders() { // set config option by // overriding method if (generateBuilders != null) { return generateBuilders; } else { return true; } } @Override public boolean isIncludeAdditionalProperties() { if (includeAdditionalProperties != null) { return includeAdditionalProperties; } else { return false; } } @Override public boolean isIncludeDynamicAccessors() { if (includeDynamicAccessors != null) { return includeDynamicAccessors; } else { return false; } } @Override public boolean isUseLongIntegers() { if (useLongIntegers != null) { return useLongIntegers; } else { return super.isUseLongIntegers(); } } }; } }
package com.opennote.ui.fragment; import android.app.AlertDialog; import android.app.Fragment; import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; import android.graphics.Typeface; import android.os.Bundle; import android.text.InputType; import android.text.method.PasswordTransformationMethod; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.EditText; import android.widget.ImageButton; import android.widget.Toast; import com.foxykeep.datadroid.requestmanager.Request; import com.foxykeep.datadroid.requestmanager.RequestManager.RequestListener; import com.opennote.R; import com.opennote.model.ErrorFactory; import com.opennote.model.RequestFactory; import com.opennote.model.RestRequestManager; import com.opennote.model.service.RestService; import com.opennote.ui.activity.MainActivity; public class UserFragment extends Fragment { View mRootView; EditText mFullNameEdit; EditText mEmailEdit; // EditText mPassEdit; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { mRootView = inflater.inflate(R.layout.fragment_user, container, false); // Get Preferences values SharedPreferences sharedPref = getActivity().getSharedPreferences(getString(R.string.preference_file_key), Context.MODE_PRIVATE); String login = sharedPref.getString(getString(R.string.user_login), null); String fullName = sharedPref.getString(getString(R.string.user_full_name), null); String email = sharedPref.getString(getString(R.string.user_email), null); // Get EtitText EditText loginEdit = (EditText) mRootView.findViewById(R.id.userLoginEdit); mFullNameEdit = (EditText) mRootView.findViewById(R.id.userFullNameEdit); 
mEmailEdit = (EditText) mRootView.findViewById(R.id.userEmailEdit); // // Swap EtitText values // fullName = mFullNameEdit.getText().toString(); // email = mEmailEdit.getText().toString(); // Set Preferences values to EditTexts loginEdit.setText(login); mFullNameEdit.setText(fullName); mEmailEdit.setText(email); ImageButton logOutBt = (ImageButton) mRootView.findViewById(R.id.userLogOutBt); ImageButton editPassBt = (ImageButton) mRootView.findViewById(R.id.userEditPassBt); Button saveChangesBt = (Button) mRootView.findViewById(R.id.userSaveBt); logOutBt.setOnClickListener(mLogOutAction); saveChangesBt.setOnClickListener(mSaveChangesAction); editPassBt.setOnClickListener(mEditPassAction); return mRootView; } private OnClickListener mLogOutAction = new OnClickListener() { @Override public void onClick(View view) { AlertDialog.Builder builderInner = new AlertDialog.Builder(getActivity()); builderInner.setTitle("Alert"); builderInner.setMessage("Logout?"); builderInner.setPositiveButton("OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int index) { SharedPreferences sharedPref = getActivity().getSharedPreferences(getActivity().getString(R.string.preference_file_key), Context.MODE_PRIVATE); SharedPreferences.Editor editor = sharedPref.edit(); editor.putString(getActivity().getString(R.string.session_hash), null); editor.commit(); getActivity().finish(); startActivity(getActivity().getIntent()); } }); builderInner.setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int index) { dialog.dismiss(); } }); builderInner.show(); } }; private OnClickListener mSaveChangesAction = new OnClickListener() { @Override public void onClick(View arg0) { // DataDroid RequestManager RestRequestManager requestManager = RestRequestManager.from(getActivity()); Request request = RequestFactory.getEditSimpleDataOperation( MainActivity.instance.getSessionHash() , 
mFullNameEdit.getText().toString() , mEmailEdit.getText().toString()); requestManager.execute(request, mEditSimpleRequestListener); } }; private OnClickListener mEditPassAction = new OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); LayoutInflater inflater = getActivity().getLayoutInflater(); final View rootPassView = inflater.inflate(R.layout.new_password, null); CheckBox showPassChBx = (CheckBox) rootPassView.findViewById(R.id.passShowPassChBx); showPassChBx.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { EditText oldPassEdit = (EditText) rootPassView.findViewById(R.id.passOldPassEdit); EditText newPassEdit = (EditText) rootPassView.findViewById(R.id.passNewPassEdit); if (isChecked) { oldPassEdit.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD); oldPassEdit.setTypeface( Typeface.SANS_SERIF ); newPassEdit.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD); newPassEdit.setTypeface( Typeface.SANS_SERIF ); } else { oldPassEdit.setTransformationMethod(PasswordTransformationMethod.getInstance()); newPassEdit.setTransformationMethod(PasswordTransformationMethod.getInstance()); } } }); builder.setView(rootPassView); builder.setTitle("Change password"); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int index) { EditText oldPassEdit = (EditText) rootPassView.findViewById(R.id.passOldPassEdit); EditText newPassEdit = (EditText) rootPassView.findViewById(R.id.passNewPassEdit); // DataDroid RequestManager RestRequestManager requestManager = RestRequestManager.from(getActivity()); Request request = RequestFactory.getEditPasswordOperation( MainActivity.instance.getSessionHash() , oldPassEdit.getText().toString() , newPassEdit.getText().toString()); 
requestManager.execute(request, mEditSimpleRequestListener); dialog.dismiss(); } }); AlertDialog dialog = builder.create(); dialog.show(); } }; private RequestListener mEditSimpleRequestListener = new RequestListener() { @Override public void onRequestFinished(Request request, Bundle resultData) { mRootView.requestFocus(); Toast.makeText(getActivity(), "Saved", Toast.LENGTH_SHORT).show(); } @Override public void onRequestConnectionError(Request request, int statusCode) { ErrorFactory.showConnectionErrorMessage(getActivity()); } @Override public void onRequestDataError(Request request) { throw new UnsupportedOperationException(); } @Override public void onRequestCustomError(Request request, Bundle resultData) { int code = resultData.getInt(RestService.STATUS_CODE); String comment = resultData.getString(RestService.COMMENT); ErrorFactory.doError(getActivity(), code, comment); } }; }
// PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2011 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.core.model;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import junit.framework.TestCase;

import org.bridgedb.Xref;
import org.bridgedb.bio.BioDataSource;
import org.pathvisio.core.data.XrefWithSymbol;
import org.pathvisio.core.preferences.PreferenceManager;
import org.pathvisio.core.util.Utils;

/**
 * Unit tests (JUnit 3) for the core pathway model: element properties,
 * graphId/graphRef bookkeeping, GPML round-tripping and the various exporters.
 * The class registers itself as listener on both the pathway and one element,
 * collecting events in {@link #received} / {@link #receivedElementEvents}.
 */
public class Test extends TestCase implements PathwayListener, PathwayElementListener
{
	Pathway data;           // pathway under test, rebuilt for every test by setUp()
	PathwayElement o;       // a DATANODE element added to data in setUp()
	List<PathwayEvent> received;                     // pathway-level events seen so far
	List<PathwayElementEvent> receivedElementEvents; // element-level events from o
	PathwayElement l;       // a LINE element added to data in setUp()

	// repository root relative to this module; test fixtures live in testData/
	private static final File PATHVISIO_BASEDIR = new File ("../..");

	public void setUp()
	{
		PreferenceManager.init();
		data = new Pathway();
		data.addListener(this);
		o = PathwayElement.createPathwayElement(ObjectType.DATANODE);
		received = new ArrayList<PathwayEvent>();
		receivedElementEvents = new ArrayList<PathwayElementEvent>();
		o.addListener(this);
		data.add (o);
		l = PathwayElement.createPathwayElement(ObjectType.LINE);
		data.add(l);
		// discard the events generated by the setup itself so each test
		// starts with empty event lists
		received.clear();
		receivedElementEvents.clear();
	}

	public void testFields ()
	{
		o.setMCenterX(1.0);

		assertEquals ("test set/get CenterX", 1.0, o.getMCenterX(), 0.0001);
		assertEquals ("Setting CenterX should generate single event", receivedElementEvents.size(), 1);

		assertEquals ("test getProperty()", 1.0, (Double)o.getStaticProperty(StaticProperty.CENTERX), 0.0001);
		try
		{
			o.setStaticProperty(StaticProperty.CENTERX, null);
			fail("Setting centerx property to null should generate exception");
		}
		catch (Exception e) {}

		// however, you should be able to set graphRef to null
		assertNull ("graphref null by default", l.getStartGraphRef());
		l.setStartGraphRef(null);
		assertNull ("can set graphRef to null", l.getStartGraphRef());
	}

	public void testProperties() throws IOException, ConverterException
	{
		// set 10 dynamic properties in two ways
		for (int i = 0; i < 5; ++i)
		{
			o.setDynamicProperty("Hello" + i, "World" + i);
		}
		for (int i = 5; i < 10; ++i)
		{
			o.setPropertyEx("Hello" + i, "World" + i);
		}

		// check contents of dynamic properties
		assertEquals("World0", o.getDynamicProperty("Hello0"));
		for (int i = 0; i < 10; ++i)
		{
			assertEquals("World" + i, o.getDynamicProperty("Hello" + i));
		}

		// check non-existing dynamic property
		assertNull (o.getDynamicProperty("NonExistingProperty"));

		// check that we have 10 dynamic properties, no more, no less.
		Set<String> dynamicKeys = o.getDynamicPropertyKeys();
		assertEquals (10, dynamicKeys.size());

		// check that superset dynamic + static also contains dynamic properties
		assertEquals("World0", o.getPropertyEx("Hello0"));
		for (int i = 0; i < 10; ++i)
		{
			assertEquals("World" + i, o.getPropertyEx("Hello" + i));
		}

		// check setting null property
		try
		{
			o.setStaticProperty(null, new Object());
			fail("Setting null property should generate exception");
		}
		catch (NullPointerException e) {}

		// check setting non string / StaticProperty property
		try
		{
			o.setPropertyEx(new Object(), new Object());
			fail("Using key that is not String or StaticProperty should generate exception");
		}
		catch (IllegalArgumentException e) {}

		// test storage of dynamic properties
		File temp = File.createTempFile ("dynaprops.test", ".gpml");
		temp.deleteOnExit();

		// set an id on this element so we can find it back easily
		String id = o.setGeneratedGraphId();

		// store
		data.writeToXml(temp, false);

		// and read back
		Pathway p2 = new Pathway();
		p2.readFromXml(temp, true);

		// get same datanode back
		PathwayElement o2 = p2.getElementById(id);
		// check that it still has the dynamic properties after storing / reading
		assertEquals ("World5", o2.getDynamicProperty("Hello5"));
		assertEquals ("World3", o2.getPropertyEx("Hello3"));
		// sanity check: no non-existing properties
		assertNull (o2.getDynamicProperty("NonExistingProperty"));
		assertNull (o2.getPropertyEx("NonExistingProperty"));

		// check that dynamic properties are copied.
		PathwayElement o3 = o2.copy();
		assertEquals ("World7", o3.getPropertyEx("Hello7"));

		// check that it's a deep copy
		o2.setDynamicProperty("Hello7", "Something other than 'World7'");
		assertEquals ("World7", o3.getPropertyEx("Hello7"));
		assertEquals ("Something other than 'World7'", o2.getPropertyEx("Hello7"));
	}

	public void testColor()
	{
		try
		{
			o.setColor(null);
			fail("Shouldn't be able to set color null");
		}
		catch (Exception e) {}
	}

	public void testObjectType()
	{
		assertEquals ("getObjectType() test", o.getObjectType(), ObjectType.DATANODE);

		assertEquals (ObjectType.SHAPE, ObjectType.getTagMapping("Shape"));
		assertNull (ObjectType.getTagMapping("Non-existing tag"));
		try
		{
			PathwayElement.createPathwayElement (null);
			fail ("Shouldn't be able to create invalid object type");
		}
		catch (NullPointerException e) { }
	}

	public void testParent()
	{
		// remove
		data.remove (o);
		assertNull ("removing object set parents null", o.getParent());
		assertEquals (received.size(), 1);
		assertEquals ("Event type should be DELETED", received.get(0).getType(), PathwayEvent.DELETED);

		// re-add
		data.add(o);
		assertEquals ("adding sets parent", o.getParent(), data);
		assertEquals (received.size(), 2);
		assertEquals ("Event type should be ADDED", received.get(1).getType(), PathwayEvent.ADDED);
	}

	/**
	 * Test graphRef's and graphId's
	 *
	 */
	public void testRef()
	{
		assertTrue ("query non-existing list of ref", data.getReferringObjects("abcde").size() == 0);

		// create link
		o.setGraphId("1");
		l.setStartGraphRef("1");
		assertTrue ("reference created", data.getReferringObjects("1").contains(l.getMStart()));

		l.setStartGraphRef("2");
		assertTrue ("reference removed", data.getReferringObjects("1").size() == 0);

		PathwayElement o2 = PathwayElement.createPathwayElement(ObjectType.DATANODE);
		data.add (o2);

		// create link in opposite order
		o.setGraphId("2");
		l.setEndGraphRef("2");
		assertTrue ("reference created (2)", data.getReferringObjects("2").contains(l.getMEnd()));
	}

	/**
	 * test that Xref and XrefWithSymbol obey the equals contract
	 */
	public void testXRefEquals()
	{
		// deliberately includes duplicates and near-duplicates to exercise
		// reflexivity, symmetry, transitivity and the hashCode contract
		Object[] testList = new Object[] {
			new Xref("1007_at", BioDataSource.AFFY),
			new Xref("3456", BioDataSource.AFFY),
			new Xref("1007_at", BioDataSource.ENTREZ_GENE),
			new Xref ("3456", BioDataSource.ENTREZ_GENE),
			new Xref ("3456", BioDataSource.ENTREZ_GENE),
			new XrefWithSymbol("3456", BioDataSource.ENTREZ_GENE, "INSR"),
			new XrefWithSymbol("3456", BioDataSource.ENTREZ_GENE, "Insulin Receptor"),
		};

		for (int i = 0; i < testList.length; ++i)
		{
			Object refi = testList[i];
			// equals must be reflexive
			assertTrue (refi.equals(refi));
			// never equal to null
			assertFalse (refi.equals(null));
		}

		for (int i = 1; i < testList.length; ++i)
			for (int j = 0; j < i; ++j)
			{
				// equals must be symmetric
				Object refi = testList[i];
				Object refj = testList[j];
				assertEquals ("Symmetry fails for " + refj + " and " + refi,
						refi.equals(refj), refj.equals(refi) );

				// hashcode contract
				if (refi.equals(refj))
				{
					assertEquals (refi.hashCode(), refj.hashCode());
				}
			}

		// equals must be transitive
		for (int i = 2; i < testList.length; ++i)
			for (int j = 1; j < i; ++j)
				for (int k = 0; k < j; ++k)
				{
					Object refi = testList[i];
					Object refj = testList[j];
					Object refk = testList[k];
					if (refi.equals (refj) && refj.equals (refk))
					{
						assertTrue (refk.equals (refi));
					}
					if (refj.equals (refk) && refk.equals (refi))
					{
						assertTrue (refi.equals (refj));
					}
					if (refk.equals (refi) && refi.equals (refj))
					{
						assertTrue (refk.equals (refj));
					}
				}
	}

	/**
	 * Test for maintaining list of unique id's per Pathway.
	 *
	 */
	public void testRefUniq()
	{
		// two equal-but-distinct String instances, to prove the id registry
		// compares by value, not by reference
		String src = "123123";
		String s1 = src.substring (3, 6);
		String s2 = src.substring (0, 3);
		assertFalse ("s1 should not be the same reference as s2", s1 == s2);
		assertTrue ("s1 should be equal to s2", s1.equals (s2));

		// test for uniqueness
		o.setGraphId(s1);

		PathwayElement o2 = PathwayElement.createPathwayElement(ObjectType.DATANODE);
		data.add (o2);
		assertSame (o.getParent(), o2.getParent());
		assertEquals ("Setting graphId on first element", o.getGraphId(), "123");

		try
		{
			o2.setGraphId(s2);
			// try setting the same id again
			fail("shouldn't be able to set the same id twice");
		}
		catch (IllegalArgumentException e) { }

		// test random id
		String x = data.getUniqueGraphId();
		try
		{
			// test that we can use it as unique id
			o.setGraphId(x);
			assertEquals (x, o.getGraphId());
			// test that we can't use the same id twice
			o2.setGraphId(x);
			fail("shouldn't be able to set the same id twice");
		}
		catch (IllegalArgumentException e) {}

		// test that a second random id is unique again
		x = data.getUniqueGraphId();
		o2.setGraphId(x);
		assertEquals (x, o2.getGraphId());

		// test setting id first, then parent
		PathwayElement o3 = PathwayElement.createPathwayElement(ObjectType.DATANODE);
		x = data.getUniqueGraphId();
		o3.setGraphId(x);
		data.add (o3);
		assertEquals (o3.getGraphId(), x);

		try
		{
			PathwayElement o4 = PathwayElement.createPathwayElement(ObjectType.DATANODE);
			// try setting the same id again
			o4.setGraphId(x);
			data.add (o4);
			fail("shouldn't be able to set the same id twice");
		}
		catch (IllegalArgumentException e) {}
	}

	public void testRef2()
	{
		o.setGraphId("1");

		PathwayElement o2 = PathwayElement.createPathwayElement(ObjectType.DATANODE);
		// note: parent not set yet!
		o2.setGraphId ("3");
		data.add(o2); // reference should now be created

		assertNull ("default endGraphRef is null", l.getEndGraphRef());

		l.setEndGraphRef("3");

		assertTrue ("reference created through adding", data.getReferringObjects("3").contains(l.getMEnd()));
	}

	public void testXml2007() throws IOException, ConverterException
	{
		File testFile = new File (PATHVISIO_BASEDIR, "testData/test.gpml");
		assertTrue (testFile.exists());
		data.readFromXml(testFile, false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".gpml");
		temp.deleteOnExit();
		data.writeToXml(temp, false);
	}

	public void testWrongFormat()
	{
		try
		{
			data.readFromXml(new File (PATHVISIO_BASEDIR, "testData/test.mapp"), false);
			fail ("Loading wrong format, Exception expected");
		}
		catch (Exception e) {}
	}

	public void testXml2008a() throws IOException, ConverterException
	{
		File testFile = new File (PATHVISIO_BASEDIR, "testData/test2.gpml");
		assertTrue (testFile.exists());
		data.readFromXml(testFile, false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".gpml");
		temp.deleteOnExit();
		data.writeToXml(temp, false);
	}

	// bug 440: valid gpml file is rejected
	// because it doesn't contain Pathway.Graphics
	public void testBug440() throws ConverterException
	{
		try
		{
			data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/nographics-test.gpml"), false);
		}
		catch (ConverterException e)
		{
			fail ("No converter exception expected");
		}
	}

	/**
	 * test exporting of .mapp (genmapp format)
	 * Note: this test is only run whenever os.name starts with Windows
	 */
	public void testMapp() throws IOException, ConverterException
	{
		if (Utils.getOS() == Utils.OS_WINDOWS)
		{
			data = new MappFormat().doImport(new File(PATHVISIO_BASEDIR, "testData/test.mapp"));
			assertTrue ("Loaded a bunch of objects from mapp", data.getDataObjects().size() > 20);
			File temp = File.createTempFile ("data.test", ".mapp");
			temp.deleteOnExit();
			data.writeToMapp(temp);

			try
			{
				data = new MappFormat().doImport(new File (PATHVISIO_BASEDIR, "testData/test.gpml"));
				fail ("Loading wrong format, Exception expected");
			}
			catch (Exception e) {}
		}
	}

	/**
	 * test exporting of .svg
	 */
	public void testSvg() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".svg");
		temp.deleteOnExit();

		data.writeToSvg(temp);
	}

	/**
	 * test exporting of .png
	 */
	public void testPng() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".png");
		temp.deleteOnExit();

		BatikImageExporter exporter = new BatikImageExporter(BatikImageExporter.TYPE_PNG);
		exporter.doExport(temp, data);
	}

	/**
	 * test exporting of .png
	 */
	public void testPng2() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".png");
		temp.deleteOnExit();

		RasterImageExporter exporter = new RasterImageExporter(BatikImageExporter.TYPE_PNG);
		exporter.doExport(temp, data);
	}

	/**
	 * test exporting of .pdf
	 */
	public void testPdf() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".pdf");
		temp.deleteOnExit();

		BatikImageExporter exporter = new BatikImageExporter(BatikImageExporter.TYPE_PDF);
		exporter.doExport(temp, data);
	}

	/**
	 * test exporting of .txt
	 */
	public void testTxt() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".txt");
		temp.deleteOnExit();

		DataNodeListExporter exporter = new DataNodeListExporter();
		exporter.doExport(temp, data);
	}

	/**
	 * test exporting of .pwf
	 */
	public void testPwf() throws IOException, ConverterException
	{
		data.readFromXml(new File(PATHVISIO_BASEDIR, "testData/test.gpml"), false);
		assertTrue ("Loaded a bunch of objects from xml", data.getDataObjects().size() > 20);
		File temp = File.createTempFile ("data.test", ".pwf");
		temp.deleteOnExit();

		EUGeneExporter exporter = new EUGeneExporter();
		exporter.doExport(temp, data);
	}

	/**
	 * Test that there is one and only one MAPPINFO object
	 *
	 */
	public void testMappInfo()
	{
		PathwayElement mi;

		mi = data.getMappInfo();
		assertEquals (mi.getObjectType(), ObjectType.MAPPINFO);
		assertTrue (data.getDataObjects().contains(mi));
		assertNotNull (mi);

		// test that adding a new mappinfo object replaces the old one.
		PathwayElement mi2 = PathwayElement.createPathwayElement(ObjectType.MAPPINFO);
		data.add (mi2);
		assertSame ("MappInfo should be replaced", data.getMappInfo(), mi2);
		assertNotSame ("Old MappInfo should be gone", data.getMappInfo(), mi);
		assertNull ("Old MappInfo should not have a parent anymore", mi.getParent());
		assertSame ("New MappInfo should now have a parent", mi2.getParent(), data);

		mi = data.getMappInfo();
		try
		{
			data.remove(mi);
			fail ("Shouldn't be able to remove mappinfo object!");
		}
		catch (IllegalArgumentException e) {}
	}

	/**
	 * Test that there is one and only one INFOBOX object
	 *
	 */
	public void testInfoBox()
	{
		PathwayElement ib;

		ib = data.getInfoBox();
		assertTrue (data.getDataObjects().contains(ib));
		assertNotNull (ib);
		assertEquals (ib.getObjectType(), ObjectType.INFOBOX);

		PathwayElement ib2 = PathwayElement.createPathwayElement(ObjectType.INFOBOX);
		data.add (ib2);
		assertSame ("Infobox should be replaced", data.getInfoBox(), ib2);
		assertNotSame ("Old Infobox should be gone", data.getInfoBox(), ib);
		assertNull ("Old Infobox should not have a parent anymore", ib.getParent());
		assertSame ("New Infobox should now have a parent", ib2.getParent(), data);

		// NOTE(review): this fetches the MappInfo element, but the rest of the
		// test is about the InfoBox — looks like a copy-paste slip for
		// data.getInfoBox(); confirm intent before changing.
		ib = data.getMappInfo();
		try
		{
			data.remove(ib);
			fail ("Shouldn't be able to remove mappinfo object!");
		}
		catch (IllegalArgumentException e) {}
	}

	public void testValidator() throws IOException
	{
		// build a minimal but schema-valid pathway, then check it serializes
		File tmp = File.createTempFile("test", ".gpml");
		o.setMCenterX(50.0);
		o.setMCenterY(50.0);
		o.setInitialSize();
		o.setGraphId(data.getUniqueGraphId());
		PathwayElement o2 = PathwayElement.createPathwayElement (ObjectType.LINE);
		o2.setMStartX(10.0);
		o2.setMStartY(10.0);
		o2.setInitialSize();
		data.add(o2);
		PathwayElement o3 = PathwayElement.createPathwayElement (ObjectType.LABEL);
		o3.setMCenterX(100.0);
		o3.setMCenterY(50);
		o3.setGraphId(data.getUniqueGraphId());
		data.add(o3);
		PathwayElement mi;

		mi = data.getMappInfo();
		assertTrue ("Mi shouldn't be null", mi != null);
		try
		{
			data.writeToXml(tmp, false);
		}
		catch (ConverterException e)
		{
			e.printStackTrace();
			fail ("Exception while writing newly created pathway");
		}
	}

	// event listener
	// receives events generated on objects o and data
	public void gmmlObjectModified(PathwayEvent e)
	{
		// store all received events
		received.add(e);
	}

	public void gmmlObjectModified(PathwayElementEvent e)
	{
		receivedElementEvents.add(e);
	}

	public void pathwayModified(PathwayEvent e)
	{
		gmmlObjectModified (e);
	}

	/**
	 * Dangling references, pointing to nothing, can occur in theory.
	 * These should be removed.
	 */
	public void testDanglingRef() throws IOException, ConverterException
	{
		// create a dangling ref
		assertEquals (data.fixReferences(), 0);

		l.setStartGraphRef("dangle");

		File temp = File.createTempFile ("data.test", ".gpml");
		temp.deleteOnExit();

		// first pass removes exactly one dangling ref, second pass finds none
		assertEquals (data.fixReferences(), 1);
		assertEquals (data.fixReferences(), 0);

		// should still validate
		try
		{
			data.writeToXml(temp, true);
		}
		catch (ConverterException e)
		{
			e.printStackTrace();
			fail ("Dangling reference should have been removed");
		}
	}
}
package com.fbs.rabbitears.views; import android.content.Context; import android.content.res.Configuration; import android.graphics.Bitmap; import android.media.MediaPlayer; import android.os.Handler; import android.util.AttributeSet; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.ImageButton; import android.widget.SeekBar; /** * Media Streamer * * A surface view for streaming media over the network */ public class MediaStreamer extends SurfaceView implements SurfaceHolder.Callback, SeekBar.OnSeekBarChangeListener, Button.OnClickListener { private MediaPlayer player; private Handler updateHandler; private Runnable seekUpdater; private String sourceAddress; private String mimeType; private Bitmap playButtonImage; private Bitmap pauseButtonImage; private SeekBar trackSeekBar; private ImageButton playPauseButton; // ---------- Initializers ---------- /** * Constructor * @param context Context parent */ public MediaStreamer(Context context) { super(context); init(null, 0); } /** * Constructor * @param context Context parent * @param attrs AttributeSet initial attributes */ public MediaStreamer(Context context, AttributeSet attrs) { super(context, attrs); init(attrs, 0); } /** * Constructor * @param context Context parent * @param attrs AttributeSet initial attributes * @param defStyle int style */ public MediaStreamer(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(attrs, defStyle); } /** * Initialize view * @param attrs AttributeSet initial attributes * @param defStyle int style */ private void init(AttributeSet attrs, int defStyle) { // setup initial attributes if (! 
isInEditMode()) // true if view is being previewed { // setup internal componenets updateHandler = new Handler(); player = new MediaPlayer(); seekUpdater = new Runnable() { @Override public void run() { if (player != null && player.isPlaying()) { int currentPosition = player.getCurrentPosition(); trackSeekBar.setProgress(currentPosition); updateHandler.postDelayed(this, 1000); } } }; } } // ---------- End Initializers ---------- /** * Buffer media streamer * @param link String link source to stream * @param mime String mime type for special processing */ public void buffer(String link, String mime) { this.sourceAddress = link; this.mimeType = mime; this.getHolder().addCallback(this); } /** * Release streamer resources */ public void release() { player.stop(); player.release(); updateHandler.removeCallbacks(seekUpdater); } /** * Adjust stream player scaled aspect ratio using a specified configuration * @param config Configuration new config to adjust to */ public void adjust(Configuration config) { int surfaceWidth; int playerWidth = player.getVideoWidth(); int playerHeight = player.getVideoHeight(); ViewGroup.LayoutParams params = getLayoutParams(); if (config.orientation == Configuration.ORIENTATION_LANDSCAPE) { surfaceWidth = getResources().getDisplayMetrics().heightPixels; } else { surfaceWidth = getResources().getDisplayMetrics().widthPixels; } params.height = (int) (((float)playerHeight / (float)playerWidth) * (float)surfaceWidth); params.width = surfaceWidth; setLayoutParams(params); } /** * Set tracking seek bar * @param seekBar SeekBar to set as tracking seek bar */ public void setTrackSeekBar(SeekBar seekBar) { this.trackSeekBar = seekBar; this.trackSeekBar.setOnSeekBarChangeListener(this); } /** * Set play pause button * @param button ImageButton to set as play/pause toggle button */ public void setPlayPauseButton(ImageButton button) { this.playPauseButton = button; this.playPauseButton.setOnClickListener(this); } /** * Set play image * @param image 
Bitmap image to set on play/pause button in play mode */ public void setPlayButtonImage(Bitmap image) { this.playButtonImage = image; } /** * Set pause image * @param image Bitmap image to set on play/pause button in pause mode */ public void setPauseButtonImage(Bitmap image) { this.pauseButtonImage = image; } // ---------- Listeners ---------- /** * Prepare media player on initial creation * @param surfaceHolder SurfaceHolder created surface */ @Override public void surfaceCreated(SurfaceHolder surfaceHolder) { try { if (! player.isPlaying()) { player.setDataSource(sourceAddress); player.prepare(); adjust(getResources().getConfiguration()); trackSeekBar.setMax(player.getDuration()); updateHandler.post(seekUpdater); onClick(playPauseButton); // play it on prepare } } catch (Exception e) { } } /** * Reset media display on change * @param surfaceHolder SurfaceHolder changed surface * @param format int change format * @param width int changed width * @param height int changed height */ @Override public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) { player.setDisplay(surfaceHolder); } @Override public void surfaceDestroyed(SurfaceHolder surfaceHolder) { // unused } /** * Toggle play/pause on click * @param view View button clicked */ @Override public void onClick(View view) { if (view.equals(playPauseButton)) { if (player.isPlaying()) { playPauseButton.setImageBitmap(playButtonImage); player.pause(); updateHandler.removeCallbacks(seekUpdater); } else { playPauseButton.setImageBitmap(pauseButtonImage); player.start(); updateHandler.post(seekUpdater); } } } /** * Seek to position in media player on seek change from user * @param seekBar SeekBar changed * @param progress int changed progress * @param fromUser True if event was triggered from user interaction, false if not */ @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if (player != null && fromUser) { player.seekTo(progress); } } 
@Override public void onStartTrackingTouch(SeekBar seekBar) { // unused } @Override public void onStopTrackingTouch(SeekBar seekBar) { // unused } // ---------- End Listeners ---------- }
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.client.file;

import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import alluxio.AlluxioURI;
import alluxio.Configuration;
import alluxio.ConfigurationTestUtils;
import alluxio.PropertyKey;
import alluxio.TestLoggerRule;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.client.file.options.FreeOptions;
import alluxio.client.file.options.GetStatusOptions;
import alluxio.client.file.options.ListStatusOptions;
import alluxio.client.file.options.LoadMetadataOptions;
import alluxio.client.file.options.MountOptions;
import alluxio.client.file.options.OpenFileOptions;
import alluxio.client.file.options.RenameOptions;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.client.file.options.UnmountOptions;
import alluxio.wire.FileInfo;
import alluxio.wire.LoadMetadataType;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.util.ArrayList;
import java.util.List;

/**
 * Unit test for functionality in {@link BaseFileSystem}.
 *
 * Each test mocks the {@link FileSystemMasterClient}, drives one file-system
 * operation, and verifies both the delegation to the master client and the
 * acquire/release pairing on the {@link FileSystemContext}.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({FileSystemContext.class, FileSystemMasterClient.class})
public final class BaseFileSystemTest {

  private static final RuntimeException EXCEPTION = new RuntimeException("test exception");
  private static final String SHOULD_HAVE_PROPAGATED_MESSAGE =
      "Exception should have been propagated";

  // NOTE(review): JUnit normally requires @Rule fields to be public — a
  // private rule field is rejected by the standard runner. Confirm the
  // PowerMock runner tolerates this, otherwise this rule is never applied.
  @Rule
  private TestLoggerRule mTestLogger = new TestLoggerRule();

  private FileSystem mFileSystem;                       // system under test
  private FileSystemContext mFileContext;               // mocked context
  private FileSystemMasterClient mFileSystemMasterClient; // mocked master client

  // Minimal concrete subclass so the abstract BaseFileSystem can be instantiated.
  private class DummyAlluxioFileSystem extends BaseFileSystem {
    public DummyAlluxioFileSystem(FileSystemContext context) {
      super(context);
    }
  }

  /**
   * Sets up the file system and the context before a test runs.
   */
  @Before
  public void before() {
    mFileContext = PowerMockito.mock(FileSystemContext.class);
    mFileSystem = new DummyAlluxioFileSystem(mFileContext);
    mFileSystemMasterClient = PowerMockito.mock(FileSystemMasterClient.class);
    when(mFileContext.acquireMasterClient()).thenReturn(mFileSystemMasterClient);
  }

  @After
  public void after() {
    ConfigurationTestUtils.resetConfiguration();
  }

  /**
   * Verifies and releases the master client after a test with a filesystem operation.
   */
  public void verifyFilesystemContextAcquiredAndReleased() {
    verify(mFileContext).acquireMasterClient();
    verify(mFileContext).releaseMasterClient(mFileSystemMasterClient);
  }

  /**
   * Tests the creation of a file via the
   * {@link BaseFileSystem#createFile(AlluxioURI, CreateFileOptions)} method.
   */
  @Test
  public void createFile() throws Exception {
    doNothing().when(mFileSystemMasterClient)
        .createFile(any(AlluxioURI.class), any(CreateFileOptions.class));
    URIStatus status = new URIStatus(new FileInfo());
    AlluxioURI file = new AlluxioURI("/file");
    // createFile internally re-reads the status without triggering metadata loading
    GetStatusOptions getStatusOptions = GetStatusOptions.defaults().setLoadMetadataType(
        LoadMetadataType.Never);
    when(mFileSystemMasterClient.getStatus(file, getStatusOptions)).thenReturn(status);
    CreateFileOptions options = CreateFileOptions.defaults();
    FileOutStream out = mFileSystem.createFile(file, options);
    verify(mFileSystemMasterClient).createFile(file, options);
    assertEquals(out.mUri, file);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when creating a file system.
   */
  @Test
  public void createException() throws Exception {
    doThrow(EXCEPTION).when(mFileSystemMasterClient)
        .createFile(any(AlluxioURI.class), any(CreateFileOptions.class));
    CreateFileOptions options = CreateFileOptions.defaults();
    try {
      mFileSystem.createFile(new AlluxioURI("/"), options);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#delete(AlluxioURI, DeleteOptions)} method.
   */
  @Test
  public void delete() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    DeleteOptions deleteOptions = DeleteOptions.defaults().setRecursive(true);
    mFileSystem.delete(file, deleteOptions);
    verify(mFileSystemMasterClient).delete(file, deleteOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when deleting a file.
   */
  @Test
  public void deleteException() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    DeleteOptions deleteOptions = DeleteOptions.defaults().setRecursive(true);
    doThrow(EXCEPTION).when(mFileSystemMasterClient).delete(file, deleteOptions);
    try {
      mFileSystem.delete(file, deleteOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#free(AlluxioURI, FreeOptions)} method.
   */
  @Test
  public void free() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    FreeOptions freeOptions = FreeOptions.defaults().setRecursive(true);
    mFileSystem.free(file, freeOptions);
    verify(mFileSystemMasterClient).free(file, freeOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when freeing a file.
   */
  @Test
  public void freeException() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    FreeOptions freeOptions = FreeOptions.defaults().setRecursive(true);
    doThrow(EXCEPTION).when(mFileSystemMasterClient).free(file, freeOptions);
    try {
      mFileSystem.free(file, freeOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#getStatus(AlluxioURI, GetStatusOptions)} method.
   */
  @Test
  public void getStatus() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    URIStatus status = new URIStatus(new FileInfo());
    GetStatusOptions getStatusOptions = GetStatusOptions.defaults();
    when(mFileSystemMasterClient.getStatus(file, getStatusOptions)).thenReturn(status);
    assertSame(status, mFileSystem.getStatus(file, getStatusOptions));
    verify(mFileSystemMasterClient).getStatus(file, getStatusOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when retrieving information.
   */
  @Test
  public void getStatusException() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    GetStatusOptions getStatusOptions = GetStatusOptions.defaults();
    when(mFileSystemMasterClient.getStatus(file, getStatusOptions)).thenThrow(EXCEPTION);
    try {
      mFileSystem.getStatus(file, getStatusOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#listStatus(AlluxioURI, ListStatusOptions)} method.
   */
  @Test
  public void listStatus() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    List<URIStatus> infos = new ArrayList<>();
    infos.add(new URIStatus(new FileInfo()));
    ListStatusOptions listStatusOptions = ListStatusOptions.defaults();
    when(mFileSystemMasterClient.listStatus(file, listStatusOptions)).thenReturn(infos);
    assertSame(infos, mFileSystem.listStatus(file, listStatusOptions));
    verify(mFileSystemMasterClient).listStatus(file, listStatusOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when listing the status.
   */
  @Test
  public void listStatusException() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    // stubbing relies on ListStatusOptions.defaults() instances being equal
    when(mFileSystemMasterClient.listStatus(file, ListStatusOptions.defaults()))
        .thenThrow(EXCEPTION);
    ListStatusOptions listStatusOptions = ListStatusOptions.defaults();
    try {
      mFileSystem.listStatus(file, listStatusOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#loadMetadata(AlluxioURI, LoadMetadataOptions)}
   * method.
   */
  @Test
  public void loadMetadata() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    LoadMetadataOptions loadMetadataOptions = LoadMetadataOptions.defaults().setRecursive(true);
    doNothing().when(mFileSystemMasterClient).loadMetadata(file, loadMetadataOptions);
    mFileSystem.loadMetadata(file, loadMetadataOptions);
    verify(mFileSystemMasterClient).loadMetadata(file, loadMetadataOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when loading the metadata.
   */
  @Test
  public void loadMetadataException() throws Exception {
    AlluxioURI file = new AlluxioURI("/file");
    LoadMetadataOptions loadMetadataOptions = LoadMetadataOptions.defaults().setRecursive(true);
    doThrow(EXCEPTION).when(mFileSystemMasterClient)
        .loadMetadata(file, loadMetadataOptions);
    try {
      mFileSystem.loadMetadata(file, loadMetadataOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#createDirectory(AlluxioURI, CreateDirectoryOptions)}
   * method.
   */
  @Test
  public void createDirectory() throws Exception {
    AlluxioURI dir = new AlluxioURI("/dir");
    CreateDirectoryOptions createDirectoryOptions = CreateDirectoryOptions.defaults();
    doNothing().when(mFileSystemMasterClient).createDirectory(dir, createDirectoryOptions);
    mFileSystem.createDirectory(dir, createDirectoryOptions);
    verify(mFileSystemMasterClient).createDirectory(dir, createDirectoryOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when creating a directory.
   */
  @Test
  public void createDirectoryException() throws Exception {
    AlluxioURI dir = new AlluxioURI("/dir");
    CreateDirectoryOptions createDirectoryOptions = CreateDirectoryOptions.defaults();
    doThrow(EXCEPTION).when(mFileSystemMasterClient)
        .createDirectory(dir, createDirectoryOptions);
    try {
      mFileSystem.createDirectory(dir, createDirectoryOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#mount(AlluxioURI, AlluxioURI, MountOptions)} method.
   */
  @Test
  public void mount() throws Exception {
    AlluxioURI alluxioPath = new AlluxioURI("/t");
    AlluxioURI ufsPath = new AlluxioURI("/u");
    MountOptions mountOptions = MountOptions.defaults();
    doNothing().when(mFileSystemMasterClient).mount(alluxioPath, ufsPath, mountOptions);
    mFileSystem.mount(alluxioPath, ufsPath, mountOptions);
    verify(mFileSystemMasterClient).mount(alluxioPath, ufsPath, mountOptions);
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Ensures that an exception is propagated correctly when mounting a path.
   */
  @Test
  public void mountException() throws Exception {
    AlluxioURI alluxioPath = new AlluxioURI("/t");
    AlluxioURI ufsPath = new AlluxioURI("/u");
    MountOptions mountOptions = MountOptions.defaults();
    doThrow(EXCEPTION).when(mFileSystemMasterClient)
        .mount(alluxioPath, ufsPath, mountOptions);
    try {
      mFileSystem.mount(alluxioPath, ufsPath, mountOptions);
      fail(SHOULD_HAVE_PROPAGATED_MESSAGE);
    } catch (Exception e) {
      assertSame(EXCEPTION, e);
    }
    verifyFilesystemContextAcquiredAndReleased();
  }

  /**
   * Tests for the {@link BaseFileSystem#openFile(AlluxioURI, OpenFileOptions)} method to
   * complete successfully.
*/ @Test public void openFile() throws Exception { AlluxioURI file = new AlluxioURI("/file"); URIStatus status = new URIStatus(new FileInfo()); GetStatusOptions getStatusOptions = GetStatusOptions.defaults(); when(mFileSystemMasterClient.getStatus(file, getStatusOptions)).thenReturn(status); OpenFileOptions openOptions = OpenFileOptions.defaults(); mFileSystem.openFile(file, openOptions); verify(mFileSystemMasterClient).getStatus(file, getStatusOptions); verifyFilesystemContextAcquiredAndReleased(); } /** * Ensures that an exception is propagated successfully when opening a file. */ @Test public void openException() throws Exception { AlluxioURI file = new AlluxioURI("/file"); GetStatusOptions getStatusOptions = GetStatusOptions.defaults(); when(mFileSystemMasterClient.getStatus(file, getStatusOptions)).thenThrow(EXCEPTION); OpenFileOptions openOptions = OpenFileOptions.defaults(); try { mFileSystem.openFile(file, openOptions); fail(SHOULD_HAVE_PROPAGATED_MESSAGE); } catch (Exception e) { assertSame(EXCEPTION, e); } verifyFilesystemContextAcquiredAndReleased(); } /** * Tests for the {@link BaseFileSystem#rename(AlluxioURI, AlluxioURI, RenameOptions)} * method. */ @Test public void rename() throws Exception { AlluxioURI src = new AlluxioURI("/file"); AlluxioURI dst = new AlluxioURI("/file2"); RenameOptions renameOptions = RenameOptions.defaults(); doNothing().when(mFileSystemMasterClient).rename(src, dst, renameOptions); mFileSystem.rename(src, dst, renameOptions); verify(mFileSystemMasterClient).rename(src, dst, renameOptions); } /** * Ensures that an exception is propagated successfully when renaming a file. 
*/ @Test public void renameException() throws Exception { AlluxioURI src = new AlluxioURI("/file"); AlluxioURI dst = new AlluxioURI("/file2"); RenameOptions renameOptions = RenameOptions.defaults(); doThrow(EXCEPTION).when(mFileSystemMasterClient).rename(src, dst, renameOptions); try { mFileSystem.rename(src, dst, renameOptions); fail(SHOULD_HAVE_PROPAGATED_MESSAGE); } catch (Exception e) { assertSame(EXCEPTION, e); } } /** * Tests for the {@link BaseFileSystem#setAttribute(AlluxioURI, SetAttributeOptions)} method. */ @Test public void setAttribute() throws Exception { AlluxioURI file = new AlluxioURI("/file"); SetAttributeOptions setAttributeOptions = SetAttributeOptions.defaults(); mFileSystem.setAttribute(file, setAttributeOptions); verify(mFileSystemMasterClient).setAttribute(file, setAttributeOptions); } /** * Ensures that an exception is propagated successfully when setting the state. */ @Test public void setStateException() throws Exception { AlluxioURI file = new AlluxioURI("/file"); SetAttributeOptions setAttributeOptions = SetAttributeOptions.defaults(); doThrow(EXCEPTION).when(mFileSystemMasterClient) .setAttribute(file, setAttributeOptions); try { mFileSystem.setAttribute(file, setAttributeOptions); fail(SHOULD_HAVE_PROPAGATED_MESSAGE); } catch (Exception e) { assertSame(EXCEPTION, e); } } /** * Tests for the {@link BaseFileSystem#unmount(AlluxioURI, UnmountOptions)} method. */ @Test public void unmount() throws Exception { AlluxioURI path = new AlluxioURI("/"); UnmountOptions unmountOptions = UnmountOptions.defaults(); doNothing().when(mFileSystemMasterClient).unmount(path); mFileSystem.unmount(path, unmountOptions); verify(mFileSystemMasterClient).unmount(path); } /** * Ensures that an exception is propagated successfully when unmounting a path. 
*/ @Test public void unmountException() throws Exception { AlluxioURI path = new AlluxioURI("/"); UnmountOptions unmountOptions = UnmountOptions.defaults(); doThrow(EXCEPTION).when(mFileSystemMasterClient).unmount(path); try { mFileSystem.unmount(path, unmountOptions); fail(SHOULD_HAVE_PROPAGATED_MESSAGE); } catch (Exception e) { assertSame(EXCEPTION, e); } } /** * Ensures warnings are logged and an exception is thrown when an {@link AlluxioURI} with an * invalid authority is passed. */ @Test public void uriCheckBadAuthority() throws Exception { Configuration.set(PropertyKey.MASTER_HOSTNAME, "localhost"); Configuration.set(PropertyKey.MASTER_RPC_PORT, "19998"); assertBadAuthority("localhost:1234", "Should fail on bad host and port"); assertBadAuthority("zk@localhost:19998", "Should fail on zk authority"); assertTrue(loggedAuthorityWarning()); assertTrue(loggedSchemeWarning()); } /** * Ensures an exception is thrown when an invalid scheme is passed. */ @Test public void uriCheckBadScheme() throws Exception { Configuration.set(PropertyKey.MASTER_HOSTNAME, "localhost"); Configuration.set(PropertyKey.MASTER_RPC_PORT, "19998"); AlluxioURI uri = new AlluxioURI("hdfs://localhost:19998/root"); try { mFileSystem.createDirectory(uri); fail("Should have failed on bad host and port"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Scheme hdfs:// in AlluxioURI is invalid")); } } /** * Ensures there is one warning when a URI with a valid scheme and authority is passed. */ @Test public void uriCheckGoodSchemeAndAuthority() throws Exception { Configuration.set(PropertyKey.MASTER_HOSTNAME, "localhost"); Configuration.set(PropertyKey.MASTER_RPC_PORT, "19998"); useUriWithAuthority("localhost:19998"); assertTrue(loggedAuthorityWarning()); assertTrue(loggedSchemeWarning()); } /** * Ensures there is no warnings or errors when an {@link AlluxioURI} without a scheme and * authority is passed. 
*/ @Test public void uriCheckNoSchemeAuthority() throws Exception { Configuration.set(PropertyKey.MASTER_HOSTNAME, "localhost"); Configuration.set(PropertyKey.MASTER_RPC_PORT, "19998"); AlluxioURI uri = new AlluxioURI("/root"); mFileSystem.createDirectory(uri); assertFalse(loggedAuthorityWarning()); assertFalse(loggedSchemeWarning()); } @Test public void uriCheckZkAuthorityMatch() throws Exception { configureZk("a:0,b:0,c:0"); useUriWithAuthority("zk@a:0,b:0,c:0"); // Same authority useUriWithAuthority("zk@a:0;b:0+c:0"); // Same authority, but different delimiters } @Test public void uriCheckZkAuthorityMismatch() throws Exception { configureZk("a:0,b:0,c:0"); assertBadAuthority("a:0,b:0,c:0", "Should fail on non-zk authority"); assertBadAuthority("zk@a:0", "Should fail on zk authority with different addresses"); assertBadAuthority("zk@a:0,b:0,c:1", "Should fail on zk authority with different addresses"); } private void assertBadAuthority(String authority, String failureMessage) throws Exception { try { useUriWithAuthority(authority); fail(failureMessage); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("does not match")); } } private void useUriWithAuthority(String authority) throws Exception { mFileSystem.createDirectory(new AlluxioURI(String.format("alluxio://%s/dir", authority))); } private boolean loggedAuthorityWarning() { return mTestLogger.wasLogged("The URI authority .* is ignored"); } private boolean loggedSchemeWarning() { return mTestLogger.wasLogged("The URI scheme .* is ignored"); } private void configureZk(String addrs) { Configuration.set(PropertyKey.ZOOKEEPER_ENABLED, true); Configuration.set(PropertyKey.ZOOKEEPER_ADDRESS, addrs); } }
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.project; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.ArrayListSet; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.containers.Stack; import com.intellij.util.io.PathKt; import gnu.trove.THashSet; import gnu.trove.TObjectHashingStrategy; import org.jdom.Element; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import org.jetbrains.idea.maven.dom.references.MavenFilteredPropertyPsiReferenceProvider; import org.jetbrains.idea.maven.importing.MavenImporter; import org.jetbrains.idea.maven.model.*; import 
org.jetbrains.idea.maven.server.MavenEmbedderWrapper;
import org.jetbrains.idea.maven.server.NativeMavenProjectHolder;
import org.jetbrains.idea.maven.utils.*;

import java.io.*;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Pattern;
import java.util.zip.CRC32;

/**
 * In-memory model of the Maven projects known to the IDE, together with the
 * aggregator/module and parent/inheritor relations between them. The tree can be
 * persisted to and restored from a binary cache file (see {@link #read} / save).
 *
 * Locking: simple configuration state (managed files, ignore lists, profiles) is guarded
 * by {@code myStateLock}; the project-graph maps are guarded by {@code myStructureLock}.
 */
public class MavenProjectsTree {
  private static final Logger LOG = Logger.getInstance(MavenProjectsTree.class);

  // Bumped whenever the binary cache layout changes; read() rejects other versions.
  private static final String STORAGE_VERSION = MavenProjectsTree.class.getSimpleName() + ".7";

  // Guards the "settings" part of the state: managed files, ignore lists, profiles.
  private final Object myStateLock = new Object();
  // Guards the structural maps below (root list, id/file/module mappings).
  private final ReentrantReadWriteLock myStructureLock = new ReentrantReadWriteLock();
  private final Lock myStructureReadLock = myStructureLock.readLock();
  private final Lock myStructureWriteLock = myStructureLock.writeLock();

  // TODO replace with sets
  private volatile Set<String> myManagedFilesPaths = new LinkedHashSet<>();
  private volatile List<String> myIgnoredFilesPaths = new ArrayList<>();
  private volatile List<String> myIgnoredFilesPatterns = new ArrayList<>();
  // Lazily compiled from myIgnoredFilesPatterns; reset to null when patterns change.
  private volatile Pattern myIgnoredFilesPatternsCache;

  private MavenExplicitProfiles myExplicitProfiles = MavenExplicitProfiles.NONE;
  // Profiles the user selected that are currently unavailable; restored once they reappear.
  private final MavenExplicitProfiles myTemporarilyRemovedExplicitProfiles =
    new MavenExplicitProfiles(new HashSet<>(), new HashSet<>());

  private final List<MavenProject> myRootProjects = new ArrayList<>();

  private final Map<MavenProject, MavenProjectTimestamp> myTimestamps = new HashMap<>();
  private final MavenWorkspaceMap myWorkspaceMap = new MavenWorkspaceMap();
  private final Map<MavenId, MavenProject> myMavenIdToProjectMapping = new HashMap<>();
  private final Map<VirtualFile, MavenProject> myVirtualFileToProjectMapping = new HashMap<>();
  private final Map<MavenProject, List<MavenProject>> myAggregatorToModuleMapping = new HashMap<>();
  private final Map<MavenProject, MavenProject> myModuleToAggregatorMapping = new HashMap<>();
  private final List<Listener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  private final Project myProject;

  // Lets the reader resolve parent/module coordinates against projects already in this tree.
  private final MavenProjectReaderProjectLocator myProjectLocator = new MavenProjectReaderProjectLocator() {
    @Override
    public VirtualFile findProjectFile(MavenId coordinates) {
      MavenProject project = findProject(coordinates);
      return project == null ? null : project.getFile();
    }
  };

  public MavenProjectsTree(Project project) {
    myProject = project;
  }

  /**
   * Restores a tree from the binary cache at {@code file}.
   *
   * @return the restored tree, or {@code null} when the cache was written by a different
   *         {@link #STORAGE_VERSION} and must be rebuilt from scratch.
   * @throws IOException on read errors; a corrupt cache file is deleted before rethrowing.
   */
  @Nullable
  public static MavenProjectsTree read(Project project, Path file) throws IOException {
    MavenProjectsTree result = new MavenProjectsTree(project);

    try (DataInputStream in = new DataInputStream(new BufferedInputStream(PathKt.inputStream(file)))) {
      try {
        if (!STORAGE_VERSION.equals(in.readUTF())) return null;
        result.myManagedFilesPaths = readCollection(in, new LinkedHashSet<>());
        result.myIgnoredFilesPaths = readCollection(in, new ArrayList<>());
        result.myIgnoredFilesPatterns = readCollection(in, new ArrayList<>());
        result.myExplicitProfiles = new MavenExplicitProfiles(readCollection(in, new THashSet<>()),
                                                              readCollection(in, new THashSet<>()));
        result.myRootProjects.addAll(readProjectsRecursively(in, result));
      }
      catch (IOException e) {
        // Close the stream early so the (apparently corrupt) cache file can be deleted.
        in.close();
        PathKt.delete(file);
        throw e;
      }
      catch (Throwable e) {
        // Any other failure during deserialization is surfaced as an IOException.
        throw new IOException(e);
      }
    }
    return result;
  }

  /** Reads a length-prefixed sequence of UTF strings into {@code result}. */
  private static <T extends Collection<String>> T readCollection(DataInputStream in, T result) throws IOException {
    int count = in.readInt();
    while (count-- > 0) {
      result.add(in.readUTF());
    }
    return result;
  }

  /** Writes {@code list} as a length-prefixed sequence of UTF strings (inverse of readCollection). */
  private static void writeCollection(DataOutputStream out, Collection<String> list) throws IOException {
    out.writeInt(list.size());
    for (String each : list) {
      out.writeUTF(each);
    }
  }

  /**
   * Reads a level of the project hierarchy (projects + their timestamps + their modules,
   * recursively) and registers each project in {@code tree}'s lookup maps.
   */
  private static List<MavenProject> readProjectsRecursively(DataInputStream in,
                                                            MavenProjectsTree tree) throws IOException {
    int count = in.readInt();
    List<MavenProject> result = new ArrayList<>(count);
    while (count-- > 0) {
      MavenProject project = MavenProject.read(in);
MavenProjectTimestamp timestamp = MavenProjectTimestamp.read(in);
      // Modules are always read, even if the project record itself failed to deserialize,
      // to keep the stream position consistent.
      List<MavenProject> modules = readProjectsRecursively(in, tree);
      if (project != null) {
        result.add(project);
        tree.myTimestamps.put(project, timestamp);
        tree.myVirtualFileToProjectMapping.put(project.getFile(), project);
        tree.fillIDMaps(project);
        tree.myAggregatorToModuleMapping.put(project, modules);
        for (MavenProject eachModule : modules) {
          tree.myModuleToAggregatorMapping.put(eachModule, project);
        }
      }
    }
    return result;
  }

  /**
   * Persists the whole tree (settings + project hierarchy) into the binary cache at
   * {@code file}, in the exact order {@link #read} expects.
   */
  public void save(@NotNull Path file) throws IOException {
    synchronized (myStateLock) {
      // readLock()/readUnlock() are declared elsewhere in this class (outside this chunk);
      // presumably they delegate to myStructureReadLock -- verify.
      readLock();
      try {
        try (DataOutputStream out = new DataOutputStream(new BufferedOutputStream(PathKt.outputStream(file)))) {
          out.writeUTF(STORAGE_VERSION);
          writeCollection(out, myManagedFilesPaths);
          writeCollection(out, myIgnoredFilesPaths);
          writeCollection(out, myIgnoredFilesPatterns);
          writeCollection(out, myExplicitProfiles.getEnabledProfiles());
          writeCollection(out, myExplicitProfiles.getDisabledProfiles());
          writeProjectsRecursively(out, myRootProjects);
        }
      }
      finally {
        readUnlock();
      }
    }
  }

  /** Writes one hierarchy level: each project, its timestamp, then its modules recursively. */
  private void writeProjectsRecursively(DataOutputStream out, List<MavenProject> list) throws IOException {
    out.writeInt(list.size());
    for (MavenProject each : list) {
      each.write(out);
      myTimestamps.get(each).write(out);
      writeProjectsRecursively(out, getModules(each));
    }
  }

  /** @return a defensive copy of the managed (user-selected root) pom paths. */
  public List<String> getManagedFilesPaths() {
    synchronized (myStateLock) {
      return new ArrayList<>(myManagedFilesPaths);
    }
  }

  /** Replaces the managed file set and the explicit profiles (fires profile-change events). */
  public void resetManagedFilesPathsAndProfiles(List<String> paths, MavenExplicitProfiles profiles) {
    synchronized (myStateLock) {
      myManagedFilesPaths = new LinkedHashSet<>(paths);
    }
    setExplicitProfiles(profiles);
  }

  @TestOnly
  public void resetManagedFilesAndProfiles(List<VirtualFile> files, MavenExplicitProfiles profiles) {
    resetManagedFilesPathsAndProfiles(MavenUtil.collectPaths(files), profiles);
  }

  /** Adds the given files/profiles on top of the currently managed set. */
  public void addManagedFilesWithProfiles(List<VirtualFile> files, MavenExplicitProfiles profiles) {
    List<String> newFiles;
    MavenExplicitProfiles newProfiles;
    synchronized (myStateLock) {
      newFiles = new ArrayList<>(myManagedFilesPaths);
      newFiles.addAll(MavenUtil.collectPaths(files));

      newProfiles = myExplicitProfiles.clone();
      newProfiles.getEnabledProfiles().addAll(profiles.getEnabledProfiles());
      newProfiles.getDisabledProfiles().addAll(profiles.getDisabledProfiles());
    }
    resetManagedFilesPathsAndProfiles(newFiles, newProfiles);
  }

  public void removeManagedFiles(List<VirtualFile> files) {
    synchronized (myStateLock) {
      myManagedFilesPaths.removeAll(MavenUtil.collectPaths(files));
    }
  }

  /** @return the managed files that still exist on disk (refreshes the VFS entry for each). */
  public List<VirtualFile> getExistingManagedFiles() {
    List<VirtualFile> result = new ArrayList<>();
    for (String path : getManagedFilesPaths()) {
      VirtualFile f = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
      if (f != null) result.add(f);
    }
    return result;
  }

  public List<String> getIgnoredFilesPaths() {
    synchronized (myStateLock) {
      return new ArrayList<>(myIgnoredFilesPaths);
    }
  }

  public void setIgnoredFilesPaths(final List<String> paths) {
    doChangeIgnoreStatus(() -> myIgnoredFilesPaths = new ArrayList<>(paths));
  }

  public void removeIgnoredFilesPaths(final Collection<String> paths) {
    doChangeIgnoreStatus(() -> myIgnoredFilesPaths.removeAll(paths));
  }

  /**
   * @return whether the project is explicitly ignored by path.
   *         Unlike {@link #isIgnored}, this does NOT consult the ignore patterns.
   */
  public boolean getIgnoredState(MavenProject project) {
    synchronized (myStateLock) {
      return myIgnoredFilesPaths.contains(project.getPath());
    }
  }

  public void setIgnoredState(List<MavenProject> projects, boolean ignored) {
    setIgnoredState(projects, ignored, false);
  }

  public void setIgnoredState(List<MavenProject> projects, boolean ignored, boolean fromImport) {
    doSetIgnoredState(projects, ignored, fromImport);
  }

  /** Adds or removes the projects' paths from the explicit ignore list and fires events. */
  private void doSetIgnoredState(List<MavenProject> projects, final boolean ignored, boolean fromImport) {
    final List<String> paths = MavenUtil.collectPaths(MavenUtil.collectFiles(projects));
    doChangeIgnoreStatus(() -> {
      if (ignored) {
        myIgnoredFilesPaths.addAll(paths);
      }
      else {
        myIgnoredFilesPaths.removeAll(paths);
      }
    }, fromImport);
  }

  public
List<String> getIgnoredFilesPatterns() {
    synchronized (myStateLock) {
      return new ArrayList<>(myIgnoredFilesPatterns);
    }
  }

  public void setIgnoredFilesPatterns(final List<String> patterns) {
    doChangeIgnoreStatus(() -> {
      // Invalidate the compiled pattern so the next isIgnored() recompiles it.
      myIgnoredFilesPatternsCache = null;
      myIgnoredFilesPatterns = new ArrayList<>(patterns);
    });
  }

  private void doChangeIgnoreStatus(Runnable runnable) {
    doChangeIgnoreStatus(runnable, false);
  }

  /**
   * Runs {@code runnable} (which mutates the ignore configuration) under the state lock,
   * diffs the ignored-project set before/after, and notifies listeners about the delta.
   */
  private void doChangeIgnoreStatus(Runnable runnable, boolean fromImport) {
    List<MavenProject> ignoredBefore;
    List<MavenProject> ignoredAfter;

    synchronized (myStateLock) {
      ignoredBefore = getIgnoredProjects();
      runnable.run();
      ignoredAfter = getIgnoredProjects();
    }

    List<MavenProject> ignored = new ArrayList<>(ignoredAfter);
    ignored.removeAll(ignoredBefore);

    List<MavenProject> unignored = new ArrayList<>(ignoredBefore);
    unignored.removeAll(ignoredAfter);

    if (ignored.isEmpty() && unignored.isEmpty()) return;

    fireProjectsIgnoredStateChanged(ignored, unignored, fromImport);
  }

  private List<MavenProject> getIgnoredProjects() {
    List<MavenProject> result = new ArrayList<>();
    for (MavenProject each : getProjects()) {
      if (isIgnored(each)) result.add(each);
    }
    return result;
  }

  /** A project is ignored when its path is explicitly listed OR matches an ignore pattern. */
  public boolean isIgnored(MavenProject project) {
    String path = project.getPath();
    synchronized (myStateLock) {
      return myIgnoredFilesPaths.contains(path) || matchesIgnoredFilesPatterns(path);
    }
  }

  private boolean matchesIgnoredFilesPatterns(String path) {
    synchronized (myStateLock) {
      if (myIgnoredFilesPatternsCache == null) {
        myIgnoredFilesPatternsCache = Pattern.compile(Strings.translateMasks(myIgnoredFilesPatterns));
      }
      return myIgnoredFilesPatternsCache.matcher(path).matches();
    }
  }

  public MavenExplicitProfiles getExplicitProfiles() {
    synchronized (myStateLock) {
      return myExplicitProfiles.clone();
    }
  }

  public void setExplicitProfiles(MavenExplicitProfiles explicitProfiles) {
    synchronized (myStateLock) {
      myExplicitProfiles = explicitProfiles.clone();
    }
    fireProfilesChanged();
  }

  /**
   * Reconciles the user's explicit profile selection with the profiles currently available:
   * selections referring to vanished profiles are parked in
   * myTemporarilyRemovedExplicitProfiles and restored when the profile reappears.
   */
  private void updateExplicitProfiles() {
    Collection<String> available = getAvailableProfiles();

    synchronized (myStateLock) {
      updateExplicitProfiles(myExplicitProfiles.getEnabledProfiles(), myTemporarilyRemovedExplicitProfiles.getEnabledProfiles(),
                             available);
      updateExplicitProfiles(myExplicitProfiles.getDisabledProfiles(), myTemporarilyRemovedExplicitProfiles.getDisabledProfiles(),
                             available);
    }
  }

  private static void updateExplicitProfiles(Collection<String> explicitProfiles,
                                             Collection<String> temporarilyRemovedExplicitProfiles,
                                             Collection<String> available) {
    // Park selections whose profile is no longer available...
    Collection<String> removedProfiles = new THashSet<>(explicitProfiles);
    removedProfiles.removeAll(available);
    temporarilyRemovedExplicitProfiles.addAll(removedProfiles);

    // ...and restore previously parked ones that became available again.
    Collection<String> restoredProfiles = new THashSet<>(temporarilyRemovedExplicitProfiles);
    restoredProfiles.retainAll(available);
    temporarilyRemovedExplicitProfiles.removeAll(restoredProfiles);

    explicitProfiles.removeAll(removedProfiles);
    explicitProfiles.addAll(restoredProfiles);
  }

  /** @return the union of profile ids declared by all projects in the tree. */
  public Collection<String> getAvailableProfiles() {
    Collection<String> res = new THashSet<>();
    for (MavenProject each : getProjects()) {
      res.addAll(each.getProfilesIds());
    }
    return res;
  }

  /**
   * @return every available profile paired with how it is activated:
   *         explicitly disabled wins over explicitly enabled, which wins over implicit activation.
   */
  public Collection<Pair<String, MavenProfileKind>> getProfilesWithStates() {
    Collection<Pair<String, MavenProfileKind>> result = new ArrayListSet<>();

    Collection<String> available = new THashSet<>();
    Collection<String> active = new THashSet<>();
    for (MavenProject each : getProjects()) {
      available.addAll(each.getProfilesIds());
      active.addAll(each.getActivatedProfilesIds().getEnabledProfiles());
    }

    Collection<String> enabledProfiles = getExplicitProfiles().getEnabledProfiles();
    Collection<String> disabledProfiles = getExplicitProfiles().getDisabledProfiles();

    for (String each : available) {
      MavenProfileKind state;
      if (disabledProfiles.contains(each)) {
        state = MavenProfileKind.NONE;
      }
      else if (enabledProfiles.contains(each)) {
        state = MavenProfileKind.EXPLICIT;
      }
      else if (active.contains(each)) {
        state = MavenProfileKind.IMPLICIT;
      }
      else {
        state = MavenProfileKind.NONE;
      }
      result.add(Pair.create(each, state));
    }
    return result;
  }

  /**
   * Re-reads every managed pom (recursively into modules) and drops root projects whose
   * managed file disappeared.
   */
  public void updateAll(boolean force, MavenGeneralSettings generalSettings, MavenProgressIndicator process) {
    List<VirtualFile> managedFiles = getExistingManagedFiles();
    MavenExplicitProfiles explicitProfiles = getExplicitProfiles();

    MavenProjectReader projectReader = new MavenProjectReader(myProject);
    update(managedFiles, true, force, explicitProfiles, projectReader, generalSettings, process);

    List<VirtualFile> obsoleteFiles = getRootProjectsFiles();
    obsoleteFiles.removeAll(managedFiles);
    delete(projectReader, obsoleteFiles, explicitProfiles, generalSettings, process);
  }

  public void update(Collection<VirtualFile> files,
                     boolean force,
                     MavenGeneralSettings generalSettings,
                     MavenProgressIndicator process) {
    update(files, false, force, getExplicitProfiles(), new MavenProjectReader(myProject), generalSettings, process);
  }

  /**
   * Adds unknown files to the tree and re-reads known ones; fires the accumulated change
   * events once at the end via {@code updateContext}.
   *
   * @param recursive whether to descend into the projects' modules
   * @param force     re-read even when the timestamp says the project is unchanged
   */
  private void update(Collection<VirtualFile> files,
                      boolean recursive,
                      boolean force,
                      MavenExplicitProfiles explicitProfiles,
                      MavenProjectReader projectReader,
                      MavenGeneralSettings generalSettings,
                      MavenProgressIndicator process) {
    if (files.isEmpty()) return;

    UpdateContext updateContext = new UpdateContext();
    // Guards doUpdate() against pom cycles (see the recursion check there).
    Stack<MavenProject> updateStack = new Stack<>();

    for (VirtualFile each : files) {
      MavenProject mavenProject = findProject(each);
      if (mavenProject == null) {
        doAdd(each, recursive, explicitProfiles, updateContext, updateStack, projectReader, generalSettings, process);
      }
      else {
        doUpdate(mavenProject,
                 findAggregator(mavenProject),
                 false,
                 recursive,
                 force,
                 explicitProfiles,
                 updateContext,
                 updateStack,
                 projectReader,
                 generalSettings,
                 process);
      }
    }

    updateExplicitProfiles();

    updateContext.fireUpdatedIfNecessary();
  }

  // NOTE(review): parameter name "recursuve" is a pre-existing typo, kept as-is.
  private void doAdd(final VirtualFile f,
                     boolean recursuve,
                     MavenExplicitProfiles explicitProfiles,
                     UpdateContext updateContext,
                     Stack<MavenProject> updateStack,
                     MavenProjectReader reader,
                     MavenGeneralSettings
generalSettings,
                     MavenProgressIndicator process) {
    MavenProject newMavenProject = new MavenProject(f);

    // If some existing project already lists this file as a module, attach under it.
    MavenProject intendedAggregator = null;
    for (MavenProject each : getProjects()) {
      if (each.getExistingModuleFiles().contains(f)) {
        intendedAggregator = each;
        break;
      }
    }

    doUpdate(newMavenProject,
             intendedAggregator,
             true,
             recursuve,
             false,
             explicitProfiles,
             updateContext,
             updateStack,
             reader,
             generalSettings,
             process);
  }

  /**
   * Core (re-)read of a single project: refreshes its model when changed, re-links it to its
   * aggregator, reconciles its module list (removing/promoting vanished modules, descending
   * into current ones), and finally re-reads projects that inherit from it.
   * Statement order here is significant; changes are collected in {@code updateContext}.
   */
  private void doUpdate(MavenProject mavenProject,
                        MavenProject aggregator,
                        boolean isNew,
                        boolean recursive,
                        boolean force,
                        MavenExplicitProfiles explicitProfiles,
                        UpdateContext updateContext,
                        Stack<MavenProject> updateStack,
                        MavenProjectReader reader,
                        MavenGeneralSettings generalSettings,
                        MavenProgressIndicator process) {
    // Cycle guard: a pom reachable from itself (via modules/inheritance) is processed once.
    if (updateStack.contains(mavenProject)) {
      MavenLog.LOG.info("Recursion detected in " + mavenProject.getFile());
      return;
    }
    updateStack.push(mavenProject);

    process.setText(ProjectBundle.message("maven.reading.pom", mavenProject.getPath()));
    process.setText2("");

    List<MavenProject> prevModules = getModules(mavenProject);

    Set<MavenProject> prevInheritors = new HashSet<>();
    if (!isNew) {
      prevInheritors.addAll(findInheritors(mavenProject));
    }

    // The timestamp aggregates pom/profiles/settings stamps; a mismatch means "re-read".
    MavenProjectTimestamp timestamp = calculateTimestamp(mavenProject, explicitProfiles, generalSettings);
    boolean isChanged = force || !timestamp.equals(myTimestamps.get(mavenProject));

    MavenProjectChanges changes = force ? MavenProjectChanges.ALL : MavenProjectChanges.NONE;
    if (isChanged) {
      writeLock();
      try {
        if (!isNew) {
          clearIDMaps(mavenProject);
        }
      }
      finally {
        writeUnlock();
      }
      MavenId oldParentId = mavenProject.getParentId();
      // The actual pom read happens outside the structure lock.
      changes = changes.mergedWith(mavenProject.read(generalSettings, explicitProfiles, reader, myProjectLocator));

      writeLock();
      try {
        myVirtualFileToProjectMapping.put(mavenProject.getFile(), mavenProject);
        fillIDMaps(mavenProject);
      }
      finally {
        writeUnlock();
      }

      if (!Comparing.equal(oldParentId, mavenProject.getParentId())) {
        // ensure timestamp reflects actual parent's timestamp
        timestamp = calculateTimestamp(mavenProject, explicitProfiles, generalSettings);
      }
      myTimestamps.put(mavenProject, timestamp);
    }

    boolean reconnected = isNew;
    if (isNew) {
      connect(aggregator, mavenProject);
    }
    else {
      reconnected = reconnect(aggregator, mavenProject);
    }

    if (isChanged || reconnected) {
      updateContext.update(mavenProject, changes);
    }

    List<VirtualFile> existingModuleFiles = mavenProject.getExistingModuleFiles();
    List<MavenProject> modulesToRemove = new ArrayList<>();
    List<MavenProject> modulesToBecomeRoots = new ArrayList<>();

    // Modules listed before but no longer present: managed ones become roots, others are dropped.
    for (MavenProject each : prevModules) {
      VirtualFile moduleFile = each.getFile();
      if (!existingModuleFiles.contains(moduleFile)) {
        if (isManagedFile(moduleFile)) {
          modulesToBecomeRoots.add(each);
        }
        else {
          modulesToRemove.add(each);
        }
      }
    }
    for (MavenProject each : modulesToRemove) {
      removeModule(mavenProject, each);
      doDelete(mavenProject, each, updateContext);
      prevInheritors.removeAll(updateContext.deletedProjects);
    }

    for (MavenProject each : modulesToBecomeRoots) {
      if (reconnect(null, each)) updateContext.update(each, MavenProjectChanges.NONE);
    }

    for (VirtualFile each : existingModuleFiles) {
      MavenProject module = findProject(each);
      boolean isNewModule = module == null;
      if (isNewModule) {
        module = new MavenProject(each);
      }
      else {
        MavenProject currentAggregator = findAggregator(module);
        // First aggregator wins: a module already owned by another project is left alone.
        if (currentAggregator != null && currentAggregator != mavenProject) {
          MavenLog.LOG.info("Module " + each + " is already included into " + mavenProject.getFile());
          continue;
        }
      }

      if (isChanged || isNewModule || recursive) {
        doUpdate(module,
                 mavenProject,
                 isNewModule,
                 recursive,
                 recursive && force, // do not force update modules if only this project was requested to be updated
                 explicitProfiles,
                 updateContext,
                 updateStack,
                 reader,
                 generalSettings,
                 process);
      }
      else {
        if (reconnect(mavenProject, module)) {
          updateContext.update(module, MavenProjectChanges.NONE);
        }
      }
    }

    prevInheritors.addAll(findInheritors(mavenProject));

    for (MavenProject each : prevInheritors) {
      doUpdate(each,
               findAggregator(each),
               false,
               false, // no need to go recursively in case of inheritance, only when updating modules
               false,
               explicitProfiles,
               updateContext,
               updateStack,
               reader,
               generalSettings,
               process);
    }

    updateStack.pop();
  }

  /**
   * Builds the change-detection stamp for a project from every input that can affect its
   * effective model: the pom itself, the resolved parent, profiles.xml, jvm.config,
   * maven.config, the user and global settings files, and the explicit profile selection.
   */
  private MavenProjectTimestamp calculateTimestamp(final MavenProject mavenProject,
                                                   final MavenExplicitProfiles explicitProfiles,
                                                   final MavenGeneralSettings generalSettings) {
    return ReadAction.compute(() -> {
      long pomTimestamp = getFileTimestamp(mavenProject.getFile());
      MavenProject parent = findParent(mavenProject);
      long parentLastReadStamp = parent == null ?
-1 : parent.getLastReadStamp();
      VirtualFile profilesXmlFile = mavenProject.getProfilesXmlFile();
      long profilesTimestamp = getFileTimestamp(profilesXmlFile);

      VirtualFile jvmConfigFile = getConfigFile(mavenProject, MavenConstants.JVM_CONFIG_RELATIVE_PATH);
      long jvmConfigTimestamp = getFileTimestamp(jvmConfigFile);
      VirtualFile mavenConfigFile = getConfigFile(mavenProject, MavenConstants.MAVEN_CONFIG_RELATIVE_PATH);
      long mavenConfigTimestamp = getFileTimestamp(mavenConfigFile);

      long userSettingsTimestamp = getFileTimestamp(generalSettings.getEffectiveUserSettingsFile());
      long globalSettingsTimestamp = getFileTimestamp(generalSettings.getEffectiveGlobalSettingsFile());

      int profilesHashCode = explicitProfiles.hashCode();

      return new MavenProjectTimestamp(pomTimestamp,
                                       parentLastReadStamp,
                                       profilesTimestamp,
                                       userSettingsTimestamp,
                                       globalSettingsTimestamp,
                                       profilesHashCode,
                                       jvmConfigTimestamp,
                                       mavenConfigTimestamp);
    });
  }

  /** Resolves a .mvn config file relative to the project's base directory, or null. */
  private static VirtualFile getConfigFile(MavenProject mavenProject, String fileRelativePath) {
    VirtualFile baseDir = VfsUtil.findFileByIoFile(MavenUtil.getBaseDir(mavenProject.getDirectoryFile()), false);
    if (baseDir != null) {
      return baseDir.findFileByRelativePath(fileRelativePath);
    }
    return null;
  }

  /** @return the VFS timestamp, or -1 for a missing/invalid file. */
  private static long getFileTimestamp(VirtualFile file) {
    if (file == null || !file.isValid()) return -1;
    return file.getTimeStamp();
  }

  public boolean isManagedFile(VirtualFile moduleFile) {
    return isManagedFile(moduleFile.getPath());
  }

  public boolean isManagedFile(String path) {
    synchronized (myStateLock) {
      // Linear scan with path-aware comparison (case/separator handling via FileUtil).
      for (String each : myManagedFilesPaths) {
        if (FileUtil.pathsEqual(each, path)) return true;
      }
      return false;
    }
  }

  /** @return whether the path is a managed file, a known project, or some project's module. */
  public boolean isPotentialProject(String path) {
    if (isManagedFile(path)) return true;

    for (MavenProject each : getProjects()) {
      if (FileUtil.pathsEqual(path, each.getPath())) return true;
      if (each.getModulePaths().contains(path)) return true;
    }
    return false;
  }

  public void delete(List<VirtualFile> files,
                     MavenGeneralSettings generalSettings,
                     MavenProgressIndicator process) {
    delete(new MavenProjectReader(myProject), files, getExplicitProfiles(), generalSettings, process);
  }

  /**
   * Removes the given projects from the tree (recursively handling their modules) and then
   * re-reads projects that inherited from them.
   */
  private void delete(MavenProjectReader projectReader,
                      List<VirtualFile> files,
                      MavenExplicitProfiles explicitProfiles,
                      MavenGeneralSettings generalSettings,
                      MavenProgressIndicator process) {
    if (files.isEmpty()) return;

    UpdateContext updateContext = new UpdateContext();
    Stack<MavenProject> updateStack = new Stack<>();

    Set<MavenProject> inheritorsToUpdate = new THashSet<>();
    for (VirtualFile each : files) {
      MavenProject mavenProject = findProject(each);
      // NOTE(review): this returns from the whole method (not `continue`) when one file is
      // unknown, silently skipping the remaining files and the inheritor updates -- confirm
      // this is intentional.
      if (mavenProject == null) return;

      inheritorsToUpdate.addAll(findInheritors(mavenProject));
      doDelete(findAggregator(mavenProject), mavenProject, updateContext);
    }

    inheritorsToUpdate.removeAll(updateContext.deletedProjects);

    for (MavenProject each : inheritorsToUpdate) {
      doUpdate(each, null, false, false, false, explicitProfiles, updateContext, updateStack, projectReader, generalSettings, process);
    }

    updateExplicitProfiles();
    updateContext.fireUpdatedIfNecessary();
  }

  /**
   * Detaches and forgets {@code project}: managed modules are promoted to roots, other
   * modules are deleted recursively, then all lookup maps are purged under the write lock.
   */
  private void doDelete(MavenProject aggregator, MavenProject project, UpdateContext updateContext) {
    for (MavenProject each : getModules(project)) {
      if (isManagedFile(each.getPath())) {
        if (reconnect(null, each)) {
          updateContext.update(each, MavenProjectChanges.NONE);
        }
      }
      else {
        doDelete(project, each, updateContext);
      }
    }

    writeLock();
    try {
      if (aggregator != null) {
        removeModule(aggregator, project);
      }
      else {
        myRootProjects.remove(project);
      }
      myTimestamps.remove(project);
      myVirtualFileToProjectMapping.remove(project.getFile());
      clearIDMaps(project);
      myAggregatorToModuleMapping.remove(project);
      myModuleToAggregatorMapping.remove(project);
    }
    finally {
      writeUnlock();
    }

    updateContext.deleted(project);
  }

  /** Registers the project's MavenId in the workspace map and the id-to-project index. */
  private void fillIDMaps(MavenProject mavenProject) {
    MavenId id = mavenProject.getMavenId();
    myWorkspaceMap.register(id, new File(mavenProject.getFile().getPath()));
    myMavenIdToProjectMapping.put(id, mavenProject);
  }

  private void
clearIDMaps(MavenProject mavenProject) { MavenId id = mavenProject.getMavenId(); myWorkspaceMap.unregister(id); myMavenIdToProjectMapping.remove(id); } private void connect(MavenProject newAggregator, MavenProject project) { writeLock(); try { if (newAggregator != null) { addModule(newAggregator, project); } else { myRootProjects.add(project); } } finally { writeUnlock(); } } private boolean reconnect(MavenProject newAggregator, MavenProject project) { MavenProject prevAggregator = findAggregator(project); if (prevAggregator == newAggregator) return false; writeLock(); try { if (prevAggregator != null) { removeModule(prevAggregator, project); } else { myRootProjects.remove(project); } if (newAggregator != null) { addModule(newAggregator, project); } else { myRootProjects.add(project); } } finally { writeUnlock(); } return true; } public boolean hasProjects() { readLock(); try { return !myRootProjects.isEmpty(); } finally { readUnlock(); } } public List<MavenProject> getRootProjects() { readLock(); try { return new ArrayList<>(myRootProjects); } finally { readUnlock(); } } private static void updateCrc(CRC32 crc, int x) { crc.update(x & 0xFF); x >>>= 8; crc.update(x & 0xFF); x >>>= 8; crc.update(x & 0xFF); x >>>= 8; crc.update(x); } private static void updateCrc(CRC32 crc, long l) { updateCrc(crc, (int)l); updateCrc(crc, (int)(l >>> 32)); } private static void updateCrc(CRC32 crc, @Nullable String s) { if (s == null) { crc.update(111); } else { updateCrc(crc, s.hashCode()); crc.update(s.length() & 0xFF); } } @NotNull public static Collection<String> getFilterExclusions(MavenProject mavenProject) { Element config = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin"); if (config == null) { return Collections.emptySet(); } final List<String> customNonFilteredExtensions = MavenJDOMUtil.findChildrenValuesByPath(config, "nonFilteredFileExtensions", "nonFilteredFileExtension"); if (customNonFilteredExtensions.isEmpty()) { return 
Collections.emptySet(); } return Collections.unmodifiableList(customNonFilteredExtensions); } public int getFilterConfigCrc(ProjectFileIndex fileIndex) { ApplicationManager.getApplication().assertReadAccessAllowed(); readLock(); try { final CRC32 crc = new CRC32(); MavenExplicitProfiles profiles = myExplicitProfiles; if (profiles != null) { updateCrc(crc, profiles.hashCode()); } Collection<MavenProject> allProjects = myVirtualFileToProjectMapping.values(); crc.update(allProjects.size() & 0xFF); for (MavenProject mavenProject : allProjects) { VirtualFile pomFile = mavenProject.getFile(); Module module = fileIndex.getModuleForFile(pomFile); if (module == null) continue; if (!Comparing.equal(fileIndex.getContentRootForFile(pomFile), pomFile.getParent())) continue; updateCrc(crc, module.getName()); MavenId mavenId = mavenProject.getMavenId(); updateCrc(crc, mavenId.getGroupId()); updateCrc(crc, mavenId.getArtifactId()); updateCrc(crc, mavenId.getVersion()); MavenId parentId = mavenProject.getParentId(); if (parentId != null) { updateCrc(crc, parentId.getGroupId()); updateCrc(crc, parentId.getArtifactId()); updateCrc(crc, parentId.getVersion()); } updateCrc(crc, mavenProject.getDirectory()); updateCrc(crc, MavenFilteredPropertyPsiReferenceProvider.getDelimitersPattern(mavenProject).pattern()); updateCrc(crc, mavenProject.getModelMap().hashCode()); updateCrc(crc, mavenProject.getResources().hashCode()); updateCrc(crc, mavenProject.getTestResources().hashCode()); updateCrc(crc, getFilterExclusions(mavenProject).hashCode()); updateCrc(crc, mavenProject.getProperties().hashCode()); for (String each : mavenProject.getFilterPropertiesFiles()) { File file = new File(each); updateCrc(crc, file.lastModified()); } XMLOutputter outputter = new XMLOutputter(Format.getCompactFormat()); Writer crcWriter = new Writer() { @Override public void write(char[] cbuf, int off, int len) throws IOException { for (int i = off, end = off + len; i < end; i++) { crc.update(cbuf[i]); } } @Override 
public void flush() throws IOException { } @Override public void close() throws IOException { } }; try { Element resourcePluginCfg = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin"); if (resourcePluginCfg != null) { outputter.output(resourcePluginCfg, crcWriter); } Element warPluginCfg = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-war-plugin"); if (warPluginCfg != null) { outputter.output(warPluginCfg, crcWriter); } } catch (IOException e) { LOG.error(e); } } return (int)crc.getValue(); } finally { readUnlock(); } } public List<VirtualFile> getRootProjectsFiles() { return MavenUtil.collectFiles(getRootProjects()); } public List<MavenProject> getProjects() { readLock(); try { return new ArrayList<>(myVirtualFileToProjectMapping.values()); } finally { readUnlock(); } } public List<MavenProject> getNonIgnoredProjects() { readLock(); try { List<MavenProject> result = new ArrayList<>(); for (MavenProject each : myVirtualFileToProjectMapping.values()) { if (!isIgnored(each)) result.add(each); } return result; } finally { readUnlock(); } } public List<VirtualFile> getProjectsFiles() { readLock(); try { return new ArrayList<>(myVirtualFileToProjectMapping.keySet()); } finally { readUnlock(); } } @Nullable public MavenProject findProject(VirtualFile f) { readLock(); try { return myVirtualFileToProjectMapping.get(f); } finally { readUnlock(); } } @Nullable public MavenProject findProject(MavenId id) { readLock(); try { return myMavenIdToProjectMapping.get(id); } finally { readUnlock(); } } @Nullable public MavenProject findProject(MavenArtifact artifact) { return findProject(artifact.getMavenId()); } private MavenWorkspaceMap getWorkspaceMap() { readLock(); try { return myWorkspaceMap.copy(); } finally { readUnlock(); } } public MavenProject findAggregator(MavenProject project) { readLock(); try { return myModuleToAggregatorMapping.get(project); } finally { readUnlock(); } } @NotNull public MavenProject 
findRootProject(@NotNull MavenProject project) { readLock(); try { MavenProject rootProject = project; while (true) { MavenProject aggregator = myModuleToAggregatorMapping.get(rootProject); if (aggregator == null) { return rootProject; } rootProject = aggregator; } } finally { readUnlock(); } } public boolean isRootProject(@NotNull MavenProject project) { readLock(); try { return myModuleToAggregatorMapping.get(project) == null; } finally { readUnlock(); } } public List<MavenProject> getModules(MavenProject aggregator) { readLock(); try { List<MavenProject> modules = myAggregatorToModuleMapping.get(aggregator); return modules == null ? Collections.emptyList() : new ArrayList<>(modules); } finally { readUnlock(); } } private void addModule(MavenProject aggregator, MavenProject module) { writeLock(); try { List<MavenProject> modules = myAggregatorToModuleMapping.get(aggregator); if (modules == null) { modules = new ArrayList<>(); myAggregatorToModuleMapping.put(aggregator, modules); } modules.add(module); myModuleToAggregatorMapping.put(module, aggregator); } finally { writeUnlock(); } } private void removeModule(MavenProject aggregator, MavenProject module) { writeLock(); try { List<MavenProject> modules = myAggregatorToModuleMapping.get(aggregator); if (modules == null) return; modules.remove(module); myModuleToAggregatorMapping.remove(module); } finally { writeUnlock(); } } private MavenProject findParent(MavenProject project) { return findProject(project.getParentId()); } public Collection<MavenProject> findInheritors(MavenProject project) { readLock(); try { List<MavenProject> result = null; MavenId id = project.getMavenId(); for (MavenProject each : myVirtualFileToProjectMapping.values()) { if (each == project) continue; if (id.equals(each.getParentId())) { if (result == null) result = new ArrayList<>(); result.add(each); } } return result == null ? 
Collections.emptyList() : result; } finally { readUnlock(); } } public List<MavenProject> getDependentProjects(Collection<MavenProject> projects) { readLock(); try { List<MavenProject> result = null; Set<MavenCoordinate> projectIds = new THashSet<>(new MavenCoordinateHashCodeStrategy()); for (MavenProject project : projects) { projectIds.add(project.getMavenId()); } final Set<File> projectPaths = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); for (MavenProject project : projects) { projectPaths.add(new File(project.getFile().getPath())); } for (MavenProject project : myVirtualFileToProjectMapping.values()) { boolean isDependent = false; Set<String> pathsInStack = project.getModulePaths(); for (final String path : pathsInStack) { if (projectPaths.contains(new File(path))) { isDependent = true; break; } } if (!isDependent) { for (MavenArtifact dep : project.getDependencies()) { if (projectIds.contains(dep)) { isDependent = true; break; } } } if (isDependent) { if (result == null) result = new ArrayList<>(); result.add(project); } } return result == null ? 
Collections.emptyList() : result; } finally { readUnlock(); } } @TestOnly public void resolve(@NotNull Project project, @NotNull MavenProject mavenProject, @NotNull MavenGeneralSettings generalSettings, @NotNull MavenEmbeddersManager embeddersManager, @NotNull MavenConsole console, @NotNull MavenProgressIndicator process) throws MavenProcessCanceledException { resolve(project, ContainerUtil.list(mavenProject), generalSettings, embeddersManager, console, new ResolveContext(), process); } public void resolve(@NotNull Project project, @NotNull Collection<MavenProject> mavenProjects, @NotNull MavenGeneralSettings generalSettings, @NotNull MavenEmbeddersManager embeddersManager, @NotNull MavenConsole console, @NotNull ResolveContext context, @NotNull MavenProgressIndicator process) throws MavenProcessCanceledException { MultiMap<File, MavenProject> projectMultiMap = groupByBasedir(mavenProjects); for (Map.Entry<File, Collection<MavenProject>> entry : projectMultiMap.entrySet()) { String baseDir = entry.getKey().getPath(); MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(MavenEmbeddersManager.FOR_DEPENDENCIES_RESOLVE, baseDir, baseDir); try { Properties userProperties = new Properties(); for (MavenProject mavenProject : mavenProjects) { for (MavenImporter mavenImporter : mavenProject.getSuitableImporters()) { mavenImporter.customizeUserProperties(project, mavenProject, userProperties); } } embedder.customizeForResolve(getWorkspaceMap(), console, process, generalSettings.isAlwaysUpdateSnapshots(), userProperties); doResolve(project, entry.getValue(), generalSettings, embedder, context, process); } finally { embeddersManager.release(embedder); } } } private void doResolve(@NotNull Project project, @NotNull Collection<MavenProject> mavenProjects, @NotNull MavenGeneralSettings generalSettings, @NotNull MavenEmbedderWrapper embedder, @NotNull ResolveContext context, @NotNull MavenProgressIndicator process) throws MavenProcessCanceledException { if 
(mavenProjects.isEmpty()) return; process.checkCanceled(); final List<String> names = ContainerUtil.mapNotNull(mavenProjects, p -> p.getDisplayName()); final String text = StringUtil.shortenPathWithEllipsis(StringUtil.join(names, ", "), 200); process.setText(ProjectBundle.message("maven.resolving.pom", text)); process.setText2(""); final MavenExplicitProfiles explicitProfiles = new MavenExplicitProfiles(new LinkedHashSet<>(), new LinkedHashSet<>()); Collection<VirtualFile> files = ContainerUtil.map(mavenProjects, p -> { explicitProfiles.getEnabledProfiles().addAll(p.getActivatedProfilesIds().getEnabledProfiles()); explicitProfiles.getDisabledProfiles().addAll(p.getActivatedProfilesIds().getDisabledProfiles()); return p.getFile(); }); Collection<MavenProjectReaderResult> results = new MavenProjectReader(project).resolveProject( generalSettings, embedder, files, explicitProfiles, myProjectLocator); for (MavenProjectReaderResult result : results) { MavenProject mavenProjectCandidate = null; for (MavenProject mavenProject : mavenProjects) { MavenId mavenId = result.mavenModel.getMavenId(); if (mavenProject.getMavenId().equals(mavenId)) { mavenProjectCandidate = mavenProject; break; } else if (mavenProject.getMavenId().equals(mavenId.getGroupId(), mavenId.getArtifactId())) { mavenProjectCandidate = mavenProject; } } if (mavenProjectCandidate == null) continue; MavenProjectChanges changes = mavenProjectCandidate.set(result, generalSettings, false, result.readingProblems.isEmpty(), false); if (result.nativeMavenProject != null) { for (MavenImporter eachImporter : mavenProjectCandidate.getSuitableImporters()) { eachImporter.resolve(project, mavenProjectCandidate, result.nativeMavenProject, embedder, context); } } fireProjectResolved(Pair.create(mavenProjectCandidate, changes), result.nativeMavenProject); } } public void resolvePlugins(@NotNull MavenProject mavenProject, @NotNull NativeMavenProjectHolder nativeMavenProject, @NotNull MavenEmbeddersManager embeddersManager, 
@NotNull MavenConsole console, @NotNull MavenProgressIndicator process) throws MavenProcessCanceledException { MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(mavenProject, MavenEmbeddersManager.FOR_PLUGINS_RESOLVE); embedder.customizeForResolve(console, process); embedder.clearCachesFor(mavenProject.getMavenId()); Set<File> filesToRefresh = new HashSet<>(); try { process.setText(ProjectBundle.message("maven.downloading.pom.plugins", mavenProject.getDisplayName())); for (MavenPlugin each : mavenProject.getDeclaredPlugins()) { process.checkCanceled(); Collection<MavenArtifact> artifacts = embedder.resolvePlugin(each, mavenProject.getRemoteRepositories(), nativeMavenProject, false); for (MavenArtifact artifact : artifacts) { File pluginJar = artifact.getFile(); File pluginDir = pluginJar.getParentFile(); if (pluginDir != null) { filesToRefresh.add(pluginDir); // Refresh both *.pom and *.jar files. } } } mavenProject.resetCache(); firePluginsResolved(mavenProject); } finally { if (filesToRefresh.size() > 0) { LocalFileSystem.getInstance().refreshIoFiles(filesToRefresh); } embeddersManager.release(embedder); } } public void resolveFolders(@NotNull final MavenProject mavenProject, @NotNull final MavenImportingSettings importingSettings, @NotNull final MavenEmbeddersManager embeddersManager, @NotNull final MavenConsole console, @NotNull final MavenProgressIndicator process) throws MavenProcessCanceledException { executeWithEmbedder(mavenProject, embeddersManager, MavenEmbeddersManager.FOR_FOLDERS_RESOLVE, console, process, new EmbedderTask() { @Override public void run(MavenEmbedderWrapper embedder) throws MavenProcessCanceledException { process.checkCanceled(); process.setText(ProjectBundle.message("maven.updating.folders.pom", mavenProject.getDisplayName())); process.setText2(""); Pair<Boolean, MavenProjectChanges> resolveResult = mavenProject.resolveFolders(embedder, importingSettings, console); if (resolveResult.first) { 
fireFoldersResolved(Pair.create(mavenProject, resolveResult.second)); } } }); } public MavenArtifactDownloader.DownloadResult downloadSourcesAndJavadocs(@NotNull Project project, @NotNull Collection<MavenProject> projects, @Nullable Collection<MavenArtifact> artifacts, boolean downloadSources, boolean downloadDocs, @NotNull MavenEmbeddersManager embeddersManager, @NotNull MavenConsole console, @NotNull MavenProgressIndicator process) throws MavenProcessCanceledException { MultiMap<File, MavenProject> projectMultiMap = groupByBasedir(projects); MavenArtifactDownloader.DownloadResult result = new MavenArtifactDownloader.DownloadResult(); for (Map.Entry<File, Collection<MavenProject>> entry : projectMultiMap.entrySet()) { String baseDir = entry.getKey().getPath(); MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(MavenEmbeddersManager.FOR_DOWNLOAD, baseDir, baseDir); try { embedder.customizeForResolve(console, process); MavenArtifactDownloader.DownloadResult result1 = MavenArtifactDownloader.download(project, this, projects, artifacts, downloadSources, downloadDocs, embedder, process); for (MavenProject each : projects) { fireArtifactsDownloaded(each); } result.resolvedDocs.addAll(result1.resolvedDocs); result.resolvedSources.addAll(result1.resolvedSources); result.unresolvedDocs.addAll(result1.unresolvedDocs); result.unresolvedSources.addAll(result1.unresolvedSources); } finally { embeddersManager.release(embedder); } } return result; } @NotNull private MultiMap<File, MavenProject> groupByBasedir(@NotNull Collection<MavenProject> projects) { return ContainerUtil.groupBy(projects, p -> MavenUtil.getBaseDir(findRootProject(p).getDirectoryFile())); } public void executeWithEmbedder(@NotNull MavenProject mavenProject, @NotNull MavenEmbeddersManager embeddersManager, @NotNull Key embedderKind, @NotNull MavenConsole console, @NotNull MavenProgressIndicator process, @NotNull EmbedderTask task) throws MavenProcessCanceledException { MavenEmbedderWrapper embedder = 
embeddersManager.getEmbedder(mavenProject, embedderKind); embedder.customizeForResolve(getWorkspaceMap(), console, process, false); embedder.clearCachesFor(mavenProject.getMavenId()); try { task.run(embedder); } finally { embeddersManager.release(embedder); } } public <Result> Result visit(Visitor<Result> visitor) { for (MavenProject each : getRootProjects()) { if (visitor.isDone()) break; doVisit(each, visitor); } return visitor.getResult(); } private <Result> void doVisit(MavenProject project, Visitor<Result> visitor) { if (!visitor.isDone() && visitor.shouldVisit(project)) { visitor.visit(project); for (MavenProject each : getModules(project)) { if (visitor.isDone()) break; doVisit(each, visitor); } visitor.leave(project); } } private void writeLock() { myStructureWriteLock.lock(); } private void writeUnlock() { myStructureWriteLock.unlock(); } private void readLock() { myStructureReadLock.lock(); } private void readUnlock() { myStructureReadLock.unlock(); } public void addListener(Listener l) { myListeners.add(l); } private void fireProfilesChanged() { for (Listener each : myListeners) { each.profilesChanged(); } } private void fireProjectsIgnoredStateChanged(@NotNull List<MavenProject> ignored, @NotNull List<MavenProject> unignored, boolean fromImport) { for (Listener each : myListeners) { each.projectsIgnoredStateChanged(ignored, unignored, fromImport); } } private void fireProjectsUpdated(@NotNull List<Pair<MavenProject, MavenProjectChanges>> updated, @NotNull List<MavenProject> deleted) { for (Listener each : myListeners) { each.projectsUpdated(updated, deleted); } } private void fireProjectResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges, @Nullable NativeMavenProjectHolder nativeMavenProject) { for (Listener each : myListeners) { each.projectResolved(projectWithChanges, nativeMavenProject); } } private void firePluginsResolved(@NotNull MavenProject project) { for (Listener each : myListeners) { each.pluginsResolved(project); } } 
private void fireFoldersResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges) { for (Listener each : myListeners) { each.foldersResolved(projectWithChanges); } } private void fireArtifactsDownloaded(@NotNull MavenProject project) { for (Listener each : myListeners) { each.artifactsDownloaded(project); } } private class UpdateContext { public final Map<MavenProject, MavenProjectChanges> updatedProjectsWithChanges = new LinkedHashMap<>(); public final Set<MavenProject> deletedProjects = new LinkedHashSet<>(); public void update(MavenProject project, MavenProjectChanges changes) { deletedProjects.remove(project); updatedProjectsWithChanges.put(project, changes.mergedWith(updatedProjectsWithChanges.get(project))); } public void deleted(MavenProject project) { updatedProjectsWithChanges.remove(project); deletedProjects.add(project); } public void deleted(Collection<MavenProject> projects) { for (MavenProject each : projects) { deleted(each); } } public void fireUpdatedIfNecessary() { if (updatedProjectsWithChanges.isEmpty() && deletedProjects.isEmpty()) return; List<MavenProject> mavenProjects = deletedProjects.isEmpty() ? Collections.emptyList() : new ArrayList<>(deletedProjects); List<Pair<MavenProject, MavenProjectChanges>> updated = updatedProjectsWithChanges.isEmpty() ? 
Collections.emptyList() : MavenUtil.mapToList(updatedProjectsWithChanges); fireProjectsUpdated(updated, mavenProjects); } } public interface EmbedderTask { void run(MavenEmbedderWrapper embedder) throws MavenProcessCanceledException; } public abstract static class Visitor<Result> { private Result result; public boolean shouldVisit(MavenProject project) { return true; } public abstract void visit(MavenProject project); public void leave(MavenProject node) { } public void setResult(Result result) { this.result = result; } public Result getResult() { return result; } public boolean isDone() { return result != null; } } public abstract static class SimpleVisitor extends Visitor<Object> { } private static class MavenProjectTimestamp { private final long myPomTimestamp; private final long myParentLastReadStamp; private final long myProfilesTimestamp; private final long myUserSettingsTimestamp; private final long myGlobalSettingsTimestamp; private final long myExplicitProfilesHashCode; private final long myJvmConfigTimestamp; private final long myMavenConfigTimestamp; private MavenProjectTimestamp(long pomTimestamp, long parentLastReadStamp, long profilesTimestamp, long userSettingsTimestamp, long globalSettingsTimestamp, long explicitProfilesHashCode, long jvmConfigTimestamp, long mavenConfigTimestamp) { myPomTimestamp = pomTimestamp; myParentLastReadStamp = parentLastReadStamp; myProfilesTimestamp = profilesTimestamp; myUserSettingsTimestamp = userSettingsTimestamp; myGlobalSettingsTimestamp = globalSettingsTimestamp; myExplicitProfilesHashCode = explicitProfilesHashCode; myJvmConfigTimestamp = jvmConfigTimestamp; myMavenConfigTimestamp = mavenConfigTimestamp; } public static MavenProjectTimestamp read(DataInputStream in) throws IOException { return new MavenProjectTimestamp(in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); } public void write(DataOutputStream out) throws IOException { 
out.writeLong(myPomTimestamp); out.writeLong(myParentLastReadStamp); out.writeLong(myProfilesTimestamp); out.writeLong(myUserSettingsTimestamp); out.writeLong(myGlobalSettingsTimestamp); out.writeLong(myExplicitProfilesHashCode); out.writeLong(myJvmConfigTimestamp); out.writeLong(myMavenConfigTimestamp); } @Override public String toString() { return "(" + myPomTimestamp + ":" + myParentLastReadStamp + ":" + myProfilesTimestamp + ":" + myUserSettingsTimestamp + ":" + myGlobalSettingsTimestamp + ":" + myExplicitProfilesHashCode + ":" + myJvmConfigTimestamp + ":" + myMavenConfigTimestamp + ")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; MavenProjectTimestamp timestamp = (MavenProjectTimestamp)o; if (myPomTimestamp != timestamp.myPomTimestamp) return false; if (myParentLastReadStamp != timestamp.myParentLastReadStamp) return false; if (myProfilesTimestamp != timestamp.myProfilesTimestamp) return false; if (myUserSettingsTimestamp != timestamp.myUserSettingsTimestamp) return false; if (myGlobalSettingsTimestamp != timestamp.myGlobalSettingsTimestamp) return false; if (myExplicitProfilesHashCode != timestamp.myExplicitProfilesHashCode) return false; if (myJvmConfigTimestamp != timestamp.myJvmConfigTimestamp) return false; if (myMavenConfigTimestamp != timestamp.myMavenConfigTimestamp) return false; return true; } @Override public int hashCode() { int result = 0; result = 31 * result + (int)(myPomTimestamp ^ (myPomTimestamp >>> 32)); result = 31 * result + (int)(myParentLastReadStamp ^ (myParentLastReadStamp >>> 32)); result = 31 * result + (int)(myProfilesTimestamp ^ (myProfilesTimestamp >>> 32)); result = 31 * result + (int)(myUserSettingsTimestamp ^ (myUserSettingsTimestamp >>> 32)); result = 31 * result + (int)(myGlobalSettingsTimestamp ^ (myGlobalSettingsTimestamp >>> 32)); result = 31 * result + (int)(myExplicitProfilesHashCode ^ (myExplicitProfilesHashCode >>> 32)); result = 
31 * result + (int)(myJvmConfigTimestamp ^ (myJvmConfigTimestamp >>> 32)); result = 31 * result + (int)(myMavenConfigTimestamp ^ (myMavenConfigTimestamp >>> 32)); return result; } } public interface Listener extends EventListener { default void profilesChanged() { } default void projectsIgnoredStateChanged(@NotNull List<MavenProject> ignored, @NotNull List<MavenProject> unignored, boolean fromImport) { } default void projectsUpdated(@NotNull List<Pair<MavenProject, MavenProjectChanges>> updated, @NotNull List<MavenProject> deleted) { } default void projectResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges, @Nullable NativeMavenProjectHolder nativeMavenProject) { } default void pluginsResolved(@NotNull MavenProject project) { } default void foldersResolved(@NotNull Pair<MavenProject, MavenProjectChanges> projectWithChanges) { } default void artifactsDownloaded(@NotNull MavenProject project) { } } private static class MavenCoordinateHashCodeStrategy implements TObjectHashingStrategy<MavenCoordinate> { @Override public int computeHashCode(MavenCoordinate object) { String artifactId = object.getArtifactId(); return artifactId == null ? 0 : artifactId.hashCode(); } @Override public boolean equals(MavenCoordinate o1, MavenCoordinate o2) { return Comparing.equal(o1.getArtifactId(), o2.getArtifactId()) && Comparing.equal(o1.getVersion(), o2.getVersion()) && Comparing.equal(o1.getGroupId(), o2.getGroupId()); } } }
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.run;

import java.text.DecimalFormat;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.RowListener;
import org.pentaho.di.trans.step.StepInterface;

/**
 * Loads a transformation from a file, executes it with the Kettle engine and records basic
 * timing statistics: the configured row count, the elapsed wall-clock time in seconds, and the
 * resulting throughput in rows per second.
 * <p>
 * Typical use: construct with a transformation filename, a log level, an optional replacement
 * target {@link DatabaseMeta} and the number of rows to process, then call {@link #run()} or
 * {@link #runEngine(boolean)} and query the getters for the collected statistics.
 * <p>
 * NOTE(review): this class is not thread-safe — the shared static {@link DecimalFormat}
 * instances and the mutable result fields assume single-threaded use, which matches how the
 * performance tests drive it.
 */
public class TimedTransRunner {
  private String filename;
  private LogLevel logLevel;
  private long records;
  private double runTime;
  private double speed;
  private Result result;
  private String rowListenerStep;
  private RowListener rowListener;

  private TransMeta transMeta;
  private DatabaseMeta targetDatabaseMeta;

  // Formatters for the one-line statistics report. DecimalFormat is not thread-safe, but this
  // runner is only ever used from a single thread (see class comment).
  private static DecimalFormat recordsDF = new DecimalFormat( "###,###,##0" );
  private static DecimalFormat runtimeDF = new DecimalFormat( "##0.00" );
  private static DecimalFormat speedDF = new DecimalFormat( "#,###,###,##0" );

  /**
   * @param filename              the transformation (.ktr) file to execute
   * @param logLevel              the Kettle log level to run the transformation with
   * @param newTargetDatabaseMeta if non-null, replaces the like-named database connection in the
   *                              transformation before execution
   * @param records               number of rows to generate/process; exposed to the
   *                              transformation as the NR_OF_ROWS variable
   */
  public TimedTransRunner( String filename, LogLevel logLevel, DatabaseMeta newTargetDatabaseMeta,
    long records ) {
    this.filename = filename;
    this.logLevel = logLevel;
    this.targetDatabaseMeta = newTargetDatabaseMeta;
    this.records = records;
  }

  /**
   * Runs the transformation without printing its description first.
   *
   * @return true if the transformation was prepared and executed, false if preparation failed
   * @throws Exception if loading or running the transformation fails
   */
  public boolean run() throws Exception {
    return runEngine( false );
  }

  /**
   * Prints the loaded transformation's name and description to stdout.
   * Must be called after {@link #runEngine(boolean)} has loaded {@link #transMeta}
   * (runEngine calls this itself when asked to print the description).
   */
  public void printTransDescription() {
    System.out.println();
    System.out.println( "Transformation name         : " + transMeta.getName() );
    System.out.println( "Transformation description  : " + Const.NVL( transMeta.getDescription(), "" ) );
    System.out.println( "-----------------------------------------------------------------------------------------------------" );
  }

  // Prints one formatted line of run statistics.
  private void printStats( String prefix, long lines, double runTime, double speed ) {
    System.out.println( prefix
      + ", rows: " + recordsDF.format( lines )
      + ", runtime: " + runtimeDF.format( runTime )
      + "s, speed: " + speedDF.format( speed ) + " rows/s" );
  }

  /**
   * Runs the transformation without printing its description first.
   *
   * @return true if the transformation was prepared and executed, false if preparation failed
   * @throws KettleException if loading the transformation fails
   */
  public boolean runEngine() throws KettleException {
    return runEngine( false );
  }

  /**
   * Loads and executes the transformation, measuring elapsed time and throughput.
   *
   * @param printDescription if true, print the transformation name/description before running
   * @return true if the transformation ran; false if preparation/initialization failed (the
   *         engine log buffer and an error are written in that case)
   * @throws KettleException if the environment cannot be initialized or the file cannot be loaded
   */
  public boolean runEngine( boolean printDescription ) throws KettleException {
    System.gc();

    KettleEnvironment.init();
    transMeta = new TransMeta( filename );
    // The performance transformations read NR_OF_ROWS to size their row generators.
    transMeta.setVariable( "NR_OF_ROWS", Long.toString( records ) );
    if ( printDescription ) {
      printTransDescription();
    }

    // Replace the TARGET database connection settings with the one provided
    if ( targetDatabaseMeta != null ) {
      transMeta.addOrReplaceDatabase( targetDatabaseMeta );
    }

    // OK, now run this transFormation.
    Trans trans = new Trans( transMeta );
    trans.setLogLevel( logLevel );

    try {
      trans.prepareExecution( null );
    } catch ( Exception e ) {
      // Dump the engine's log buffer so the initialization failure is diagnosable.
      System.err.println( KettleLogStore.getAppender().getBuffer( trans.getLogChannelId(), true ) );
      trans.getLogChannel().logError( "Error preparing / initializing transformation", e );
      return false;
    }

    // Optionally attach a row listener to one step, for callers that want to inspect rows.
    if ( !Const.isEmpty( rowListenerStep ) ) {
      StepInterface step = trans.findRunThread( rowListenerStep );
      if ( step != null ) {
        step.addRowListener( rowListener );
      }
    }

    long startTime = System.currentTimeMillis();

    trans.startThreads();
    trans.waitUntilFinished();

    long stopTime = System.currentTimeMillis();

    result = trans.getResult();

    runTime = (double) ( stopTime - startTime ) / 1000;
    // Guard against a sub-millisecond run: the previous unconditional division produced
    // Double.POSITIVE_INFINITY (printed as "∞ rows/s") when runTime was 0.
    speed = runTime > 0 ? records / runTime : 0.0;

    printStats( "V3 results", records, runTime, speed );

    return true;
  }

  /**
   * @return the filename
   */
  public String getFilename() {
    return filename;
  }

  /**
   * @param filename the filename to set
   */
  public void setFilename( String filename ) {
    this.filename = filename;
  }

  /**
   * @return the logLevel
   */
  public LogLevel getLogLevel() {
    return logLevel;
  }

  /**
   * @param logLevel the logLevel to set
   */
  public void setLogLevel( LogLevel logLevel ) {
    this.logLevel = logLevel;
  }

  /**
   * @return the records
   */
  public long getRecords() {
    return records;
  }

  /**
   * @param records the records to set
   */
  public void setRecords( long records ) {
    this.records = records;
  }

  /**
   * @return the result of the last {@link #runEngine(boolean)} call, or null if none ran
   */
  public Result getNewResult() {
    return result;
  }

  /**
   * @param result the result to set
   */
  public void setNewResult( Result result ) {
    this.result = result;
  }

  /**
   * Registers a row listener to be attached to the named step the next time the
   * transformation is run.
   *
   * @param stepname    the name of the step to listen to
   * @param rowListener the listener receiving that step's rows
   */
  public void addRowListener( String stepname, RowListener rowListener ) {
    this.rowListenerStep = stepname;
    this.rowListener = rowListener;
  }

  /**
   * @return the transMeta
   */
  public TransMeta getTransMeta() {
    return transMeta;
  }

  /**
   * @param transMeta the transMeta to set
   */
  public void setTransMeta( TransMeta transMeta ) {
    this.transMeta = transMeta;
  }

  /**
   * @return the elapsed run time of the last execution, in seconds
   */
  public double getRunTime() {
    return runTime;
  }

  /**
   * @param runTime the run time to set
   */
  public void setNewRunTime( double runTime ) {
    this.runTime = runTime;
  }

  /**
   * @return the measured throughput of the last execution, in rows per second
   */
  public double speed() {
    return speed;
  }

  /**
   * @param speed the speed to set
   */
  public void setSpeed( double speed ) {
    this.speed = speed;
  }

  /**
   * @return the targetDatabaseMeta
   */
  public DatabaseMeta getTargetDatabaseMeta() {
    return targetDatabaseMeta;
  }

  /**
   * @param targetDatabaseMeta the targetDatabaseMeta to set
   */
  public void setTargetDatabaseMeta( DatabaseMeta targetDatabaseMeta ) {
    this.targetDatabaseMeta = targetDatabaseMeta;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.carbondata.core.load;

import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;

/**
 * Metadata describing a single load (segment): its name, status, start and
 * modification/deletion timestamps, merge information and visibility.
 * Instances are serialized, so all fields are kept as simple strings.
 *
 * <p>Identity (equals/hashCode) is based solely on {@link #loadName}.
 */
public class LoadMetadataDetails implements Serializable {

  private static final long serialVersionUID = 1106104914918491724L;

  private String timestamp;
  private String loadStatus;
  private String loadName;
  private String partitionCount;

  /**
   * LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(LoadMetadataDetails.class.getName());

  /**
   * Segment modification or deletion time stamp
   */
  private String modificationOrdeletionTimesStamp;
  private String loadStartTime;

  private String mergedLoadName;

  /**
   * visibility is used to determine whether to the load is visible or not.
   */
  private String visibility = "true";

  /**
   * To know if the segment is a major compacted segment or not.
   */
  private String majorCompacted;

  public String getPartitionCount() {
    return partitionCount;
  }

  public void setPartitionCount(String partitionCount) {
    this.partitionCount = partitionCount;
  }

  public String getTimestamp() {
    return timestamp;
  }

  public void setTimestamp(String timestamp) {
    this.timestamp = timestamp;
  }

  public String getLoadStatus() {
    return loadStatus;
  }

  public void setLoadStatus(String loadStatus) {
    this.loadStatus = loadStatus;
  }

  public String getLoadName() {
    return loadName;
  }

  public void setLoadName(String loadName) {
    this.loadName = loadName;
  }

  /**
   * @return the modificationOrdeletionTimesStamp
   */
  public String getModificationOrdeletionTimesStamp() {
    return modificationOrdeletionTimesStamp;
  }

  /**
   * @param modificationOrdeletionTimesStamp the modificationOrdeletionTimesStamp to set
   */
  public void setModificationOrdeletionTimesStamp(String modificationOrdeletionTimesStamp) {
    this.modificationOrdeletionTimesStamp = modificationOrdeletionTimesStamp;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#hashCode()
   */
  @Override public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((loadName == null) ? 0 : loadName.hashCode());
    return result;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#equals(java.lang.Object)
   */
  @Override public boolean equals(Object obj) {
    if (obj == null) {
      return false;
    }
    if (!(obj instanceof LoadMetadataDetails)) {
      return false;
    }
    LoadMetadataDetails other = (LoadMetadataDetails) obj;
    if (loadName == null) {
      if (other.loadName != null) {
        return false;
      }
    } else if (!loadName.equals(other.loadName)) {
      return false;
    }
    return true;
  }

  /**
   * @return the startLoadTime
   */
  public String getLoadStartTime() {
    return loadStartTime;
  }

  /**
   * Returns the load start time converted to a long value, or null when the
   * stored timestamp is absent or unparseable.
   *
   * @return load start time as a long, or null
   */
  public Long getLoadStartTimeAsLong() {
    return getTimeStamp(loadStartTime);
  }

  /**
   * Parses the given timestamp string (format {@link CarbonCommonConstants#CARBON_TIMESTAMP})
   * into a long value.
   *
   * <p>A fresh {@link SimpleDateFormat} is created per call because
   * SimpleDateFormat is not thread-safe and must not be shared as a static
   * instance across concurrently loading threads.
   *
   * @param loadStartTime timestamp string; may be null or empty
   * @return parsed time, or null if the input is absent or cannot be parsed
   */
  private Long getTimeStamp(String loadStartTime) {
    if (loadStartTime == null || loadStartTime.isEmpty()) {
      return null;
    }
    SimpleDateFormat parser = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP);
    try {
      Date dateToStr = parser.parse(loadStartTime);
      // getTime() yields milliseconds; the * 1000 scales the value as the
      // store expects (presumably microseconds) — kept from the original.
      return dateToStr.getTime() * 1000;
    } catch (ParseException e) {
      LOGGER.error("Cannot convert " + loadStartTime + " to Time/Long type value "
          + e.getMessage());
      return null;
    }
  }

  /**
   * @param loadStartTime
   */
  public void setLoadStartTime(String loadStartTime) {
    this.loadStartTime = loadStartTime;
  }

  /**
   * @return the mergedLoadName
   */
  public String getMergedLoadName() {
    return mergedLoadName;
  }

  /**
   * @param mergedLoadName the mergedLoadName to set
   */
  public void setMergedLoadName(String mergedLoadName) {
    this.mergedLoadName = mergedLoadName;
  }

  /**
   * @return the visibility
   */
  public String getVisibility() {
    return visibility;
  }

  /**
   * @param visibility the visibility to set
   */
  public void setVisibility(String visibility) {
    this.visibility = visibility;
  }

  /**
   * Return true if it is a major compacted segment.
   * @return
   */
  public String isMajorCompacted() {
    return majorCompacted;
  }

  /**
   * Set true if it is a major compacted segment.
   * @param majorCompacted
   */
  public void setMajorCompacted(String majorCompacted) {
    this.majorCompacted = majorCompacted;
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs.update;

import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.search.scope.packageSet.NamedScopesHolder;
import com.intellij.psi.search.scope.packageSet.PackageSetBase;
import com.intellij.ui.SimpleTextAttributes;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.TreeNode;
import java.io.File;
import java.util.*;

/**
 * author: lesya
 */
public class GroupTreeNode extends AbstractTreeNode implements Disposable {
  private final String myName;
  private final boolean mySupportsDeletion;
  private final List<String> myFilePaths = new ArrayList<>();
  private final Map<String, String> myErrorsMap;
  private final SimpleTextAttributes myInvalidAttributes;
  private final Project myProject;
  private final String myFileGroupId;

  public GroupTreeNode(@NotNull String name,
                       boolean supportsDeletion,
                       @NotNull SimpleTextAttributes invalidAttributes,
                       @NotNull Project project,
                       @NotNull Map<String, String> errorsMap,
                       String id) {
    myName = name;
    mySupportsDeletion = supportsDeletion;
    myInvalidAttributes = invalidAttributes;
    myProject = project;
    myErrorsMap = errorsMap;
    myFileGroupId = id;
  }

  public String getFileGroupId() {
    return myFileGroupId;
  }

  @NotNull
  @Override
  public String getName() {
    return myName;
  }

  @Override
  public Icon getIcon(boolean expanded) {
    return AllIcons.Nodes.Folder;
  }

  /** Collects the virtual files of every direct child, in child order. */
  @NotNull
  @Override
  public Collection<VirtualFile> getVirtualFiles() {
    ArrayList<VirtualFile> collected = new ArrayList<>();
    int childCount = getChildCount();
    for (int index = 0; index < childCount; index++) {
      collected.addAll(((AbstractTreeNode)getChildAt(index)).getVirtualFiles());
    }
    return collected;
  }

  /** Collects the java.io files of every direct child, in child order. */
  @NotNull
  @Override
  public Collection<File> getFiles() {
    ArrayList<File> collected = new ArrayList<>();
    int childCount = getChildCount();
    for (int index = 0; index < childCount; index++) {
      collected.addAll(((AbstractTreeNode)getChildAt(index)).getFiles());
    }
    return collected;
  }

  /** Sums the item counts of all children. */
  @Override
  protected int getItemsCount() {
    int total = 0;
    for (Enumeration children = children(); children.hasMoreElements(); ) {
      total += ((AbstractTreeNode)children.nextElement()).getItemsCount();
    }
    return total;
  }

  @Override
  protected boolean showStatistics() {
    return true;
  }

  @NotNull
  @Override
  public SimpleTextAttributes getAttributes() {
    if (myFilterAttributes != null) {
      return myFilterAttributes;
    }
    return SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES;
  }

  @Override
  public boolean getSupportsDeletion() {
    return mySupportsDeletion;
  }

  public void addFilePath(@NotNull String filePath) {
    myFilePaths.add(filePath);
  }

  /**
   * Rebuilds this node's children, either as nested groups or as a flat/packaged
   * file list, applying the optional scope filter.
   */
  public void rebuild(boolean groupByPackages,
                      @Nullable Pair<PackageSetBase, NamedScopesHolder> filter,
                      boolean showOnlyFilteredItems) {
    myFilterAttributes = null;
    if (!containsGroups()) {
      rebuildFiles(groupByPackages, filter, showOnlyFilteredItems);
    }
    else {
      rebuildGroups(groupByPackages, filter, showOnlyFilteredItems);
    }
  }

  /** Recursively rebuilds child groups and propagates whether any matched the filter. */
  private void rebuildGroups(boolean groupByPackages,
                             @Nullable Pair<PackageSetBase, NamedScopesHolder> filter,
                             boolean showOnlyFilteredItems) {
    boolean filterMatched = false;
    int childCount = getChildCount();
    for (int index = 0; index < childCount; index++) {
      GroupTreeNode subGroup = (GroupTreeNode)getChildAt(index);
      subGroup.rebuild(groupByPackages, filter, showOnlyFilteredItems);
      if (subGroup.myFilterAttributes != null) {
        filterMatched = true;
      }
    }
    applyFilter(filterMatched);
  }

  /** Disposes and removes all children, then rebuilds them from myFilePaths. */
  private void rebuildFiles(boolean groupByPackages,
                            @Nullable Pair<PackageSetBase, NamedScopesHolder> filter,
                            boolean showOnlyFilteredItems) {
    // Dispose from the end so indices stay valid while walking.
    for (int index = getChildCount() - 1; index >= 0; index--) {
      TreeNode child = getChildAt(index);
      if (child instanceof Disposable) {
        Disposer.dispose((Disposable)child);
      }
    }
    removeAllChildren();

    if (groupByPackages) {
      buildPackages();
      acceptFilter(filter, showOnlyFilteredItems);
    }
    else {
      buildFiles(filter, showOnlyFilteredItems);
    }

    setTreeModel(myTreeModel);
    if (myTreeModel != null) {
      myTreeModel.nodeStructureChanged(this);
    }
  }

  /** Builds the package-grouped subtree from the collected file paths. */
  private void buildPackages() {
    Collection<File> allFiles = new LinkedHashSet<>();
    for (String path : myFilePaths) {
      allFiles.add(new File(path));
    }
    GroupByPackages grouping = new GroupByPackages(allFiles);
    addFiles(this, grouping.getRoots(), allFiles, grouping, null);
  }

  /**
   * Recursively attaches file/directory nodes under parentNode. Directories sort
   * before files; within each kind, paths compare case-insensitively.
   */
  private void addFiles(@NotNull AbstractTreeNode parentNode,
                        @NotNull List<File> roots,
                        @NotNull final Collection<File> files,
                        @NotNull GroupByPackages groupByPackages,
                        String parentPath) {
    Collections.sort(roots, (first, second) -> {
      boolean firstIsFile = files.contains(first);
      boolean secondIsFile = files.contains(second);
      if (firstIsFile != secondIsFile) {
        return firstIsFile ? 1 : -1;
      }
      return first.getAbsolutePath().compareToIgnoreCase(second.getAbsolutePath());
    });

    for (File root : roots) {
      FileOrDirectoryTreeNode node;
      if (files.contains(root)) {
        node = new FileTreeNode(root.getAbsolutePath(), myInvalidAttributes, myProject, parentPath);
      }
      else {
        node = new DirectoryTreeNode(root.getAbsolutePath(), myProject, parentPath);
      }
      Disposer.register((Disposable)parentNode, node);
      parentNode.add(node);
      addFiles(node, groupByPackages.getChildren(root), files, groupByPackages, node.getFilePath());
    }
  }

  /** Builds a flat, case-insensitively sorted list of file nodes. */
  private void buildFiles(@Nullable Pair<PackageSetBase, NamedScopesHolder> filter,
                          boolean showOnlyFilteredItems) {
    Collections.sort(myFilePaths, String::compareToIgnoreCase);

    boolean filterMatched = false;
    for (String path : myFilePaths) {
      FileTreeNode node = new FileTreeNode(path, myInvalidAttributes, myProject, null);

      if (filter != null) {
        if (node.acceptFilter(filter, showOnlyFilteredItems)) {
          filterMatched = true;
        }
        else if (showOnlyFilteredItems) {
          // Filtered out entirely: dispose instead of attaching.
          Disposer.dispose(node);
          continue;
        }
      }

      String error = myErrorsMap.get(path);
      if (error != null) {
        node.setErrorText(error);
      }
      add(node);
      Disposer.register(this, node);
    }
    applyFilter(filterMatched);
  }

  // A group node holds either file paths or nested groups, never both; an empty
  // path list therefore means "children are groups".
  private boolean containsGroups() {
    return myFilePaths.isEmpty();
  }

  @Override
  public void dispose() {
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.beam.sdk.io.hadoop.inputformat; import static com.google.common.base.Preconditions.checkArgument; import com.google.auto.value.AutoValue; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.util.concurrent.AtomicDouble; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.hadoop.SerializableConfiguration; import org.apache.beam.sdk.io.hadoop.WritableCoder; import 
org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.SimpleFunction; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A {@link HadoopInputFormatIO} is a Transform for reading data from any source which * implements Hadoop {@link InputFormat}. For example- Cassandra, Elasticsearch, HBase, Redis, * Postgres etc. {@link HadoopInputFormatIO} has to make several performance trade-offs in * connecting to {@link InputFormat}, so if there is another Beam IO Transform specifically for * connecting to your data source of choice, we would recommend using that one, but this IO * Transform allows you to connect to many data sources that do not yet have a Beam IO Transform. * * <p>You will need to pass a Hadoop {@link Configuration} with parameters specifying how the read * will occur. 
Many properties of the Configuration are optional, and some are required for certain * {@link InputFormat} classes, but the following properties must be set for all InputFormats: * <ul> * <li>{@code mapreduce.job.inputformat.class}: The {@link InputFormat} class used to connect to * your data source of choice.</li> * <li>{@code key.class}: The key class returned by the {@link InputFormat} in * {@code mapreduce.job.inputformat.class}.</li> * <li>{@code value.class}: The value class returned by the {@link InputFormat} in * {@code mapreduce.job.inputformat.class}.</li> * </ul> * For example: * * <pre> * { * Configuration myHadoopConfiguration = new Configuration(false); * // Set Hadoop InputFormat, key and value class in configuration * myHadoopConfiguration.setClass(&quot;mapreduce.job.inputformat.class&quot;, * MyDbInputFormatClass, InputFormat.class); * myHadoopConfiguration.setClass(&quot;key.class&quot;, MyDbInputFormatKeyClass, Object.class); * myHadoopConfiguration.setClass(&quot;value.class&quot;, * MyDbInputFormatValueClass, Object.class); * } * </pre> * * <p>You will need to check to see if the key and value classes output by the {@link InputFormat} * have a Beam {@link Coder} available. If not, you can use withKeyTranslation/withValueTranslation * to specify a method transforming instances of those classes into another class that is supported * by a Beam {@link Coder}. These settings are optional and you don't need to specify translation * for both key and value. If you specify a translation, you will need to make sure the K or V of * the read transform match the output type of the translation. * * <p>You will need to set appropriate InputFormat key and value class (i.e. "key.class" and * "value.class") in Hadoop {@link Configuration}. 
If you set different InputFormat key or * value class than InputFormat's actual key or value class then, it may result in an error like * "unexpected extra bytes after decoding" while the decoding process of key/value object happens. * Hence, it is important to set appropriate InputFormat key and value class. * * <h3>Reading using {@link HadoopInputFormatIO}</h3> * * <pre> * {@code * Pipeline p = ...; // Create pipeline. * // Read data only with Hadoop configuration. * p.apply("read", * HadoopInputFormatIO.<InputFormatKeyClass, InputFormatKeyClass>read() * .withConfiguration(myHadoopConfiguration); * } * // Read data with configuration and key translation (Example scenario: Beam Coder is not * available for key class hence key translation is required.). * SimpleFunction&lt;InputFormatKeyClass, MyKeyClass&gt; myOutputKeyType = * new SimpleFunction&lt;InputFormatKeyClass, MyKeyClass&gt;() { * public MyKeyClass apply(InputFormatKeyClass input) { * // ...logic to transform InputFormatKeyClass to MyKeyClass * } * }; * </pre> * * <pre> * {@code * p.apply("read", * HadoopInputFormatIO.<MyKeyClass, InputFormatKeyClass>read() * .withConfiguration(myHadoopConfiguration) * .withKeyTranslation(myOutputKeyType); * } * </pre> * * <p>// Read data with configuration and value translation (Example scenario: Beam Coder is not * available for value class hence value translation is required.). 
* * <pre> * {@code * SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt; myOutputValueType = * new SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt;() { * public MyValueClass apply(InputFormatValueClass input) { * // ...logic to transform InputFormatValueClass to MyValueClass * } * }; * } * </pre> * * <pre> * {@code * p.apply("read", * HadoopInputFormatIO.<InputFormatKeyClass, MyValueClass>read() * .withConfiguration(myHadoopConfiguration) * .withValueTranslation(myOutputValueType); * } * </pre> */ @Experimental(Experimental.Kind.SOURCE_SINK) public class HadoopInputFormatIO { private static final Logger LOG = LoggerFactory.getLogger(HadoopInputFormatIO.class); /** * Creates an uninitialized {@link HadoopInputFormatIO.Read}. Before use, the {@code Read} must * be initialized with a HadoopInputFormatIO.Read#withConfiguration(HadoopConfiguration) that * specifies the source. A key/value translation may also optionally be specified using * {@link HadoopInputFormatIO.Read#withKeyTranslation}/ * {@link HadoopInputFormatIO.Read#withValueTranslation}. */ public static <K, V> Read<K, V> read() { return new AutoValue_HadoopInputFormatIO_Read.Builder<K, V>().build(); } /** * A {@link PTransform} that reads from any data source which implements Hadoop InputFormat. For * e.g. Cassandra, Elasticsearch, HBase, Redis, Postgres, etc. See the class-level Javadoc on * {@link HadoopInputFormatIO} for more information. * @param <K> Type of keys to be read. * @param <V> Type of values to be read. * @see HadoopInputFormatIO */ @AutoValue public abstract static class Read<K, V> extends PTransform<PBegin, PCollection<KV<K, V>>> { // Returns the Hadoop Configuration which contains specification of source. 
@Nullable public abstract SerializableConfiguration getConfiguration(); @Nullable public abstract SimpleFunction<?, K> getKeyTranslationFunction(); @Nullable public abstract SimpleFunction<?, V> getValueTranslationFunction(); @Nullable public abstract TypeDescriptor<K> getKeyTypeDescriptor(); @Nullable public abstract TypeDescriptor<V> getValueTypeDescriptor(); @Nullable public abstract TypeDescriptor<?> getinputFormatClass(); @Nullable public abstract TypeDescriptor<?> getinputFormatKeyClass(); @Nullable public abstract TypeDescriptor<?> getinputFormatValueClass(); abstract Builder<K, V> toBuilder(); @AutoValue.Builder abstract static class Builder<K, V> { abstract Builder<K, V> setConfiguration(SerializableConfiguration configuration); abstract Builder<K, V> setKeyTranslationFunction(SimpleFunction<?, K> function); abstract Builder<K, V> setValueTranslationFunction(SimpleFunction<?, V> function); abstract Builder<K, V> setKeyTypeDescriptor(TypeDescriptor<K> keyTypeDescriptor); abstract Builder<K, V> setValueTypeDescriptor(TypeDescriptor<V> valueTypeDescriptor); abstract Builder<K, V> setInputFormatClass(TypeDescriptor<?> inputFormatClass); abstract Builder<K, V> setInputFormatKeyClass(TypeDescriptor<?> inputFormatKeyClass); abstract Builder<K, V> setInputFormatValueClass(TypeDescriptor<?> inputFormatValueClass); abstract Read<K, V> build(); } /** Reads from the source using the options provided by the given configuration. 
*/ public Read<K, V> withConfiguration(Configuration configuration) { validateConfiguration(configuration); TypeDescriptor<?> inputFormatClass = TypeDescriptor.of(configuration.getClass("mapreduce.job.inputformat.class", null)); TypeDescriptor<?> inputFormatKeyClass = TypeDescriptor.of(configuration.getClass("key.class", null)); TypeDescriptor<?> inputFormatValueClass = TypeDescriptor.of(configuration.getClass("value.class", null)); Builder<K, V> builder = toBuilder().setConfiguration(new SerializableConfiguration(configuration)); builder.setInputFormatClass(inputFormatClass); builder.setInputFormatKeyClass(inputFormatKeyClass); builder.setInputFormatValueClass(inputFormatValueClass); /* * Sets the output key class to InputFormat key class if withKeyTranslation() is not called * yet. */ if (getKeyTranslationFunction() == null) { builder.setKeyTypeDescriptor((TypeDescriptor<K>) inputFormatKeyClass); } /* * Sets the output value class to InputFormat value class if withValueTranslation() is not * called yet. */ if (getValueTranslationFunction() == null) { builder.setValueTypeDescriptor((TypeDescriptor<V>) inputFormatValueClass); } return builder.build(); } /** Transforms the keys read from the source using the given key translation function. */ public Read<K, V> withKeyTranslation(SimpleFunction<?, K> function) { checkArgument(function != null, "function can not be null"); // Sets key class to key translation function's output class type. return toBuilder().setKeyTranslationFunction(function) .setKeyTypeDescriptor((TypeDescriptor<K>) function.getOutputTypeDescriptor()).build(); } /** Transforms the values read from the source using the given value translation function. */ public Read<K, V> withValueTranslation(SimpleFunction<?, V> function) { checkArgument(function != null, "function can not be null"); // Sets value class to value translation function's output class type. 
return toBuilder().setValueTranslationFunction(function) .setValueTypeDescriptor((TypeDescriptor<V>) function.getOutputTypeDescriptor()).build(); } @Override public PCollection<KV<K, V>> expand(PBegin input) { validateTransform(); // Get the key and value coders based on the key and value classes. CoderRegistry coderRegistry = input.getPipeline().getCoderRegistry(); Coder<K> keyCoder = getDefaultCoder(getKeyTypeDescriptor(), coderRegistry); Coder<V> valueCoder = getDefaultCoder(getValueTypeDescriptor(), coderRegistry); HadoopInputFormatBoundedSource<K, V> source = new HadoopInputFormatBoundedSource<>( getConfiguration(), keyCoder, valueCoder, getKeyTranslationFunction(), getValueTranslationFunction()); return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source)); } /** * Validates that the mandatory configuration properties such as InputFormat class, InputFormat * key and value classes are provided in the Hadoop configuration. */ private void validateConfiguration(Configuration configuration) { checkArgument(configuration != null, "configuration can not be null"); checkArgument( configuration.get("mapreduce.job.inputformat.class") != null, "Configuration must contain \"mapreduce.job.inputformat.class\""); checkArgument( configuration.get("key.class") != null, "configuration must contain \"key.class\""); checkArgument( configuration.get("value.class") != null, "configuration must contain \"value.class\""); } /** * Validates construction of this transform. */ @VisibleForTesting void validateTransform() { checkArgument(getConfiguration() != null, "withConfiguration() is required"); // Validate that the key translation input type must be same as key class of InputFormat. validateTranslationFunction(getinputFormatKeyClass(), getKeyTranslationFunction(), "Key translation's input type is not same as hadoop InputFormat : %s key class : %s"); // Validate that the value translation input type must be same as value class of InputFormat. 
validateTranslationFunction(getinputFormatValueClass(), getValueTranslationFunction(), "Value translation's input type is not same as hadoop InputFormat : " + "%s value class : %s"); } /** * Validates translation function given for key/value translation. */ private void validateTranslationFunction(TypeDescriptor<?> inputType, SimpleFunction<?, ?> simpleFunction, String errorMsg) { if (simpleFunction != null) { if (!simpleFunction.getInputTypeDescriptor().equals(inputType)) { throw new IllegalArgumentException( String.format(errorMsg, getinputFormatClass().getRawType(), inputType.getRawType())); } } } /** * Returns the default coder for a given type descriptor. Coder Registry is queried for correct * coder, if not found in Coder Registry, then check if the type descriptor provided is of type * Writable, then WritableCoder is returned, else exception is thrown "Cannot find coder". */ public <T> Coder<T> getDefaultCoder(TypeDescriptor<?> typeDesc, CoderRegistry coderRegistry) { Class classType = typeDesc.getRawType(); try { return (Coder<T>) coderRegistry.getCoder(typeDesc); } catch (CannotProvideCoderException e) { if (Writable.class.isAssignableFrom(classType)) { return (Coder<T>) WritableCoder.of(classType); } throw new IllegalStateException(String.format("Cannot find coder for %s : ", typeDesc) + e.getMessage(), e); } } } /** * Bounded source implementation for {@link HadoopInputFormatIO}. * @param <K> Type of keys to be read. * @param <V> Type of values to be read. 
*/ public static class HadoopInputFormatBoundedSource<K, V> extends BoundedSource<KV<K, V>> implements Serializable { private final SerializableConfiguration conf; private final Coder<K> keyCoder; private final Coder<V> valueCoder; @Nullable private final SimpleFunction<?, K> keyTranslationFunction; @Nullable private final SimpleFunction<?, V> valueTranslationFunction; private final SerializableSplit inputSplit; private transient List<SerializableSplit> inputSplits; private long boundedSourceEstimatedSize = 0; private transient InputFormat<?, ?> inputFormatObj; private transient TaskAttemptContext taskAttemptContext; private static final Set<Class<?>> immutableTypes = new HashSet<>( Arrays.asList( String.class, Byte.class, Short.class, Integer.class, Long.class, Float.class, Double.class, Boolean.class, BigInteger.class, BigDecimal.class)); HadoopInputFormatBoundedSource( SerializableConfiguration conf, Coder<K> keyCoder, Coder<V> valueCoder, @Nullable SimpleFunction<?, K> keyTranslationFunction, @Nullable SimpleFunction<?, V> valueTranslationFunction) { this(conf, keyCoder, valueCoder, keyTranslationFunction, valueTranslationFunction, null); } protected HadoopInputFormatBoundedSource( SerializableConfiguration conf, Coder<K> keyCoder, Coder<V> valueCoder, @Nullable SimpleFunction<?, K> keyTranslationFunction, @Nullable SimpleFunction<?, V> valueTranslationFunction, SerializableSplit inputSplit) { this.conf = conf; this.inputSplit = inputSplit; this.keyCoder = keyCoder; this.valueCoder = valueCoder; this.keyTranslationFunction = keyTranslationFunction; this.valueTranslationFunction = valueTranslationFunction; } public SerializableConfiguration getConfiguration() { return conf; } @Override public void validate() { checkArgument(conf != null, "conf can not be null"); checkArgument(keyCoder != null, "keyCoder can not be null"); checkArgument(valueCoder != null, "valueCoder can not be null"); } @Override public void populateDisplayData(DisplayData.Builder builder) { 
super.populateDisplayData(builder); Configuration hadoopConfig = getConfiguration().get(); if (hadoopConfig != null) { builder.addIfNotNull(DisplayData.item("mapreduce.job.inputformat.class", hadoopConfig.get("mapreduce.job.inputformat.class")) .withLabel("InputFormat Class")); builder.addIfNotNull(DisplayData.item("key.class", hadoopConfig.get("key.class")) .withLabel("Key Class")); builder.addIfNotNull(DisplayData.item("value.class", hadoopConfig.get("value.class")) .withLabel("Value Class")); } } @Override public List<BoundedSource<KV<K, V>>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception { // desiredBundleSizeBytes is not being considered as splitting based on this // value is not supported by inputFormat getSplits() method. if (inputSplit != null) { LOG.info("Not splitting source {} because source is already split.", this); return ImmutableList.of((BoundedSource<KV<K, V>>) this); } computeSplitsIfNecessary(); LOG.info("Generated {} splits. Size of first split is {} ", inputSplits.size(), inputSplits .get(0).getSplit().getLength()); return Lists.transform( inputSplits, serializableInputSplit -> { HadoopInputFormatBoundedSource<K, V> hifBoundedSource = new HadoopInputFormatBoundedSource<>( conf, keyCoder, valueCoder, keyTranslationFunction, valueTranslationFunction, serializableInputSplit); return hifBoundedSource; }); } @Override public long getEstimatedSizeBytes(PipelineOptions po) throws Exception { if (inputSplit == null) { // If there are no splits computed yet, then retrieve the splits. computeSplitsIfNecessary(); return boundedSourceEstimatedSize; } return inputSplit.getSplit().getLength(); } /** * This is a helper function to compute splits. This method will also calculate size of the * data being read. Note: This method is executed exactly once and the splits are retrieved * and cached in this. These splits are further used by split() and * getEstimatedSizeBytes(). 
*/ @VisibleForTesting void computeSplitsIfNecessary() throws IOException, InterruptedException { if (inputSplits != null) { return; } createInputFormatInstance(); List<InputSplit> splits = inputFormatObj.getSplits(Job.getInstance(conf.get())); if (splits == null) { throw new IOException("Error in computing splits, getSplits() returns null."); } if (splits.isEmpty()) { throw new IOException("Error in computing splits, getSplits() returns a empty list"); } boundedSourceEstimatedSize = 0; inputSplits = new ArrayList<>(); for (InputSplit inputSplit : splits) { if (inputSplit == null) { throw new IOException("Error in computing splits, split is null in InputSplits list " + "populated by getSplits() : "); } boundedSourceEstimatedSize += inputSplit.getLength(); inputSplits.add(new SerializableSplit(inputSplit)); } } /** * Creates instance of InputFormat class. The InputFormat class name is specified in the Hadoop * configuration. */ protected void createInputFormatInstance() throws IOException { if (inputFormatObj == null) { try { taskAttemptContext = new TaskAttemptContextImpl(conf.get(), new TaskAttemptID()); inputFormatObj = (InputFormat<?, ?>) conf .get() .getClassByName( conf.get().get("mapreduce.job.inputformat.class")) .newInstance(); /* * If InputFormat explicitly implements interface {@link Configurable}, then setConf() * method of {@link Configurable} needs to be explicitly called to set all the * configuration parameters. For example: InputFormat classes which implement Configurable * are {@link org.apache.hadoop.mapreduce.lib.db.DBInputFormat DBInputFormat}, {@link * org.apache.hadoop.hbase.mapreduce.TableInputFormat TableInputFormat}, etc. 
*/ if (Configurable.class.isAssignableFrom(inputFormatObj.getClass())) { ((Configurable) inputFormatObj).setConf(conf.get()); } } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) { throw new IOException("Unable to create InputFormat object: ", e); } } } @VisibleForTesting InputFormat<?, ?> getInputFormat(){ return inputFormatObj; } @VisibleForTesting void setInputFormatObj(InputFormat<?, ?> inputFormatObj) { this.inputFormatObj = inputFormatObj; } @Override public Coder<KV<K, V>> getOutputCoder() { return KvCoder.of(keyCoder, valueCoder); } @Override public BoundedReader<KV<K, V>> createReader(PipelineOptions options) throws IOException { this.validate(); if (inputSplit == null) { throw new IOException("Cannot create reader as source is not split yet."); } else { createInputFormatInstance(); return new HadoopInputFormatReader<>( this, keyTranslationFunction, valueTranslationFunction, inputSplit, inputFormatObj, taskAttemptContext); } } /** * BoundedReader for Hadoop InputFormat source. * * @param <K> Type of keys RecordReader emits. * @param <V> Type of values RecordReader emits. */ class HadoopInputFormatReader<T1, T2> extends BoundedSource.BoundedReader<KV<K, V>> { private final HadoopInputFormatBoundedSource<K, V> source; @Nullable private final SimpleFunction<T1, K> keyTranslationFunction; @Nullable private final SimpleFunction<T2, V> valueTranslationFunction; private final SerializableSplit split; private RecordReader<T1, T2> recordReader; private volatile boolean doneReading = false; private AtomicLong recordsReturned = new AtomicLong(); // Tracks the progress of the RecordReader. 
private AtomicDouble progressValue = new AtomicDouble(); private transient InputFormat<T1, T2> inputFormatObj; private transient TaskAttemptContext taskAttemptContext; private HadoopInputFormatReader(HadoopInputFormatBoundedSource<K, V> source, @Nullable SimpleFunction keyTranslationFunction, @Nullable SimpleFunction valueTranslationFunction, SerializableSplit split, InputFormat inputFormatObj, TaskAttemptContext taskAttemptContext) { this.source = source; this.keyTranslationFunction = keyTranslationFunction; this.valueTranslationFunction = valueTranslationFunction; this.split = split; this.inputFormatObj = inputFormatObj; this.taskAttemptContext = taskAttemptContext; } @Override public HadoopInputFormatBoundedSource<K, V> getCurrentSource() { return source; } @Override public boolean start() throws IOException { try { recordsReturned.set(0L); recordReader = (RecordReader<T1, T2>) inputFormatObj.createRecordReader(split.getSplit(), taskAttemptContext); if (recordReader != null) { recordReader.initialize(split.getSplit(), taskAttemptContext); progressValue.set(getProgress()); if (recordReader.nextKeyValue()) { recordsReturned.incrementAndGet(); doneReading = false; return true; } } else { throw new IOException(String.format("Null RecordReader object returned by %s", inputFormatObj.getClass())); } recordReader = null; } catch (InterruptedException e) { throw new IOException( "Could not read because the thread got interrupted while " + "reading the records with an exception: ", e); } doneReading = true; return false; } @Override public boolean advance() throws IOException { try { progressValue.set(getProgress()); if (recordReader.nextKeyValue()) { recordsReturned.incrementAndGet(); return true; } doneReading = true; } catch (InterruptedException e) { throw new IOException("Unable to read data: ", e); } return false; } @Override public KV<K, V> getCurrent() { K key = null; V value = null; try { // Transform key if translation function is provided. 
key = transformKeyOrValue((T1) recordReader.getCurrentKey(), keyTranslationFunction, keyCoder); // Transform value if translation function is provided. value = transformKeyOrValue((T2) recordReader.getCurrentValue(), valueTranslationFunction, valueCoder); } catch (IOException | InterruptedException e) { LOG.error("Unable to read data: " + "{}", e); throw new IllegalStateException("Unable to read data: " + "{}", e); } return KV.of(key, value); } /** * Returns the serialized output of transformed key or value object. * @throws ClassCastException * @throws CoderException */ private <T, T3> T3 transformKeyOrValue(T input, @Nullable SimpleFunction<T, T3> simpleFunction, Coder<T3> coder) throws CoderException, ClassCastException { T3 output; if (null != simpleFunction) { output = simpleFunction.apply(input); } else { output = (T3) input; } return cloneIfPossiblyMutable((T3) output, coder); } /** * Beam expects immutable objects, but the Hadoop InputFormats tend to re-use the same object * when returning them. Hence, mutable objects returned by Hadoop InputFormats are cloned. */ private <T> T cloneIfPossiblyMutable(T input, Coder<T> coder) throws CoderException, ClassCastException { // If the input object is not of known immutable type, clone the object. if (!isKnownImmutable(input)) { input = CoderUtils.clone(coder, input); } return input; } /** * Utility method to check if the passed object is of a known immutable type. 
*/ private boolean isKnownImmutable(Object o) { return immutableTypes.contains(o.getClass()); } @Override public void close() throws IOException { LOG.info("Closing reader after reading {} records.", recordsReturned); if (recordReader != null) { recordReader.close(); recordReader = null; } } @Override public Double getFractionConsumed() { if (doneReading) { return 1.0; } else if (recordReader == null || recordsReturned.get() == 0L) { return 0.0; } if (progressValue.get() == 0.0) { return null; } return progressValue.doubleValue(); } /** * Returns RecordReader's progress. * @throws IOException * @throws InterruptedException */ private Double getProgress() throws IOException, InterruptedException { try { float progress = recordReader.getProgress(); return (double) progress < 0 || progress > 1 ? 0.0 : progress; } catch (IOException e) { LOG.error( "Error in computing the fractions consumed as RecordReader.getProgress() throws an " + "exception : " + "{}", e); throw new IOException( "Error in computing the fractions consumed as RecordReader.getProgress() throws an " + "exception : " + e.getMessage(), e); } } @Override public final long getSplitPointsRemaining() { if (doneReading) { return 0; } /** * This source does not currently support dynamic work rebalancing, so remaining parallelism * is always 1. */ return 1; } } } /** * A wrapper to allow Hadoop {@link org.apache.hadoop.mapreduce.InputSplit} to be serialized using * Java's standard serialization mechanisms. 
*/ public static class SerializableSplit implements Serializable { InputSplit inputSplit; public SerializableSplit() {} public SerializableSplit(InputSplit split) { checkArgument(split instanceof Writable, String.format("Split is not of type Writable: %s", split)); this.inputSplit = split; } public InputSplit getSplit() { return inputSplit; } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { ObjectWritable ow = new ObjectWritable(); ow.setConf(new Configuration(false)); ow.readFields(in); this.inputSplit = (InputSplit) ow.get(); } private void writeObject(ObjectOutputStream out) throws IOException { new ObjectWritable(inputSplit).write(out); } } }
package edu.ucdenver.ccp.nlp.uima.serialization.rdf; /* * #%L * Colorado Computational Pharmacology's nlp module * %% * Copyright (C) 2012 - 2017 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
* #L% */ import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.util.Collection; import java.util.zip.GZIPOutputStream; import org.apache.uima.UimaContext; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.cas.CASException; import org.apache.uima.fit.component.JCasAnnotator_ImplBase; import org.apache.uima.fit.descriptor.ConfigurationParameter; import org.apache.uima.fit.factory.AnalysisEngineFactory; import org.apache.uima.fit.util.JCasUtil; import org.apache.uima.jcas.JCas; import org.apache.uima.jcas.tcas.Annotation; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.apache.uima.util.Logger; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFWriter; import edu.ucdenver.ccp.common.file.FileUtil; import edu.ucdenver.ccp.common.file.FileWriterUtil; import edu.ucdenver.ccp.common.reflection.ConstructorUtil; import edu.ucdenver.ccp.datasource.rdfizer.rdf.ice.RdfUtil.RdfFormat; import edu.ucdenver.ccp.nlp.uima.shims.ShimDefaults; import edu.ucdenver.ccp.nlp.uima.util.View_Util; import edu.ucdenver.ccp.uima.shims.annotation.AnnotationDataExtractor; import edu.ucdenver.ccp.uima.shims.document.DocumentMetadataHandler; public class RdfSerialization_AE extends JCasAnnotator_ImplBase { /** * Parameter name used in the UIMA descriptor file for the document metadata * extractor implementation to use */ public static final String PARAM_DOCUMENT_METADATA_HANDLER_CLASS = "documentMetadataHandlerClassName"; @ConfigurationParameter(mandatory = true, description = "name of the DocumentMetadataHandler implementation to use", defaultValue = 
ShimDefaults.CCP_DOCUMENT_METADATA_HANDLER_CLASS_NAME) private String documentMetadataHandlerClassName; private DocumentMetadataHandler documentMetadataHandler; /** * Parameter name used in the UIMA descriptor file for the annotation data * extractor implementation to use */ public static final String PARAM_ANNOTATION_DATA_EXTRACTOR_CLASS = "annotationDataExtractorClassName"; @ConfigurationParameter(mandatory = true, description = "name of the AnnotationDataExtractor implementation to use", defaultValue = "edu.ucdenver.ccp.nlp.uima.shims.annotation.impl.CcpAnnotationDataExtractor") private String annotationDataExtractorClassName; private AnnotationDataExtractor annotationDataExtractor; /** * Parameter name used in the UIMA descriptor file for the token attribute * extractor implementation to use */ public static final String PARAM_DOCUMENT_RDF_GENERATOR_CLASS = "documentRdfGeneratorClassName"; @ConfigurationParameter(mandatory = true, description = "name of the DocumentRdfGenerator implementation to use") private String documentRdfGeneratorClassName; private DocumentRdfGenerator documentRdfGenerator; /** * Parameter name used in the UIMA descriptor file for the annotation RDF * generator implementation to use */ public static final String PARAM_ANNOTATION_RDF_GENERATOR_CLASS = "annotationRdfGeneratorClassName"; @ConfigurationParameter(mandatory = true, description = "name of the AnnotationRdfGenerator implementation to use") private String annotationRdfGeneratorClassName; private AnnotationRdfGenerator annotationRdfGenerator; // useful when we are serializing relations // /** // * Parameter name used in the UIMA descriptor file for the semantic // statement (RDF) generator // * implementation to use // */ // public static final String PARAM_SEMANTIC_STATEMENT_GENERATOR_CLASS = // "semanticStatementGeneratorClassName"; // @ConfigurationParameter(mandatory = false, description = "name of the // SemanticStatementGenerator implementation to use") // private String 
semanticStatementGeneratorClassName; // private SemanticStatementGenerator semanticStatementGenerator; /** * Parameter name used in the UIMA descriptor file for the URI Factory * implementation to use */ public static final String PARAM_URI_FACTORY_CLASS = "uriFactoryClassName"; @ConfigurationParameter(mandatory = true, description = "name of the UriFactory implementation to use") private String uriFactoryClassName; private UriFactory uriFactory; public static final String PARAM_OUTPUT_DIRECTORY = "outputDirectory"; @ConfigurationParameter(mandatory = false, description = "") private File outputDirectory; /** * Signifies the file infix to use when naming the output file. */ public static final String PARAM_OUTPUT_FILE_INFIX = "outputFileInfix"; @ConfigurationParameter(mandatory = true, description = "") private String outputFileInfix; /** * If the outputDirectory is not specified, then this view will be used to * search for the source-document-path, which will then be used as the * serialization directory for the output of this annotator. 
*/ public static final String PARAM_SOURCE_VIEW_NAME = "sourceViewName"; @ConfigurationParameter(mandatory = false, description = "") private String sourceViewName; /** * Signifies the view whose document text will be serialized */ public static final String PARAM_OUTPUT_VIEW_NAME = "outputViewName"; @ConfigurationParameter(mandatory = false, description = "") private String outputViewName; /** * If true, the output file will be compressed using gzip */ public static final String PARAM_COMPRESS_OUTPUT_FLAG = "compressOutput"; @ConfigurationParameter(mandatory = false, description = "", defaultValue = "true") private boolean compressOutput; public final static String PARAM_RDF_FORMAT = "rdfFormat"; @ConfigurationParameter(mandatory = true, description = "This string specifies the RDF format to use") private RdfFormat rdfFormat; private Logger logger; @Override public void initialize(UimaContext context) throws ResourceInitializationException { logger = context.getLogger(); super.initialize(context); documentMetadataHandler = (DocumentMetadataHandler) ConstructorUtil .invokeConstructor(documentMetadataHandlerClassName); annotationDataExtractor = (AnnotationDataExtractor) ConstructorUtil .invokeConstructor(annotationDataExtractorClassName); // if (semanticStatementGeneratorClassName != null) { // semanticStatementGenerator = (SemanticStatementGenerator) // ConstructorUtil // .invokeConstructor(semanticStatementGeneratorClassName); // } documentRdfGenerator = (DocumentRdfGenerator) ConstructorUtil.invokeConstructor(documentRdfGeneratorClassName); annotationRdfGenerator = (AnnotationRdfGenerator) ConstructorUtil .invokeConstructor(annotationRdfGeneratorClassName); uriFactory = (UriFactory) ConstructorUtil.invokeConstructor(uriFactoryClassName); } private void writeStatements(Collection<? 
extends Statement> stmts, RDFWriter writer) { for (Statement s : stmts) { try { writer.handleStatement(s); } catch (RDFHandlerException e) { throw new IllegalStateException(e); } } } private File getOutputFile(JCas jCas, String documentId) throws AnalysisEngineProcessException { String outputFilename = String.format("%s-%s-annots.%s", documentId, outputFileInfix, rdfFormat.defaultFileExtension()); if (compressOutput) { outputFilename += ".gz"; } File outputFile = null; if (outputDirectory != null) { outputFile = new File(outputDirectory, outputFilename); } else { /* * look for a reference to the source file in the specified source * view and use that directory as the output directory */ JCas view = null; if (sourceViewName == null) { view = jCas; } else { try { view = View_Util.getView(jCas, sourceViewName); } catch (CASException e) { throw new AnalysisEngineProcessException(e); } } File sourceDocumentFile = documentMetadataHandler.extractSourceDocumentPath(view); if (sourceDocumentFile != null) { outputFile = new File(sourceDocumentFile.getParentFile(), outputFilename); } } if (outputFile == null) { throw new AnalysisEngineProcessException( "Unable to determine output directory for document text serialization.", null); } FileUtil.mkdir(outputFile.getParentFile()); return outputFile; } @Override public void process(JCas jcas) throws AnalysisEngineProcessException { String documentId = documentMetadataHandler.extractDocumentId(jcas); File outputFile = getOutputFile(jcas, documentId); try (BufferedWriter writer = (compressOutput) ? 
new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(outputFile)))) : FileWriterUtil.initBufferedWriter(outputFile)) { RDFWriter rdfWriter = rdfFormat.createWriter(writer); rdfWriter.startRDF(); JCas view = View_Util.getView(jcas, outputViewName); /* Write the document RDF */ Collection<Statement> documentStmts = documentRdfGenerator.generateRdf(jcas, documentMetadataHandler); writeStatements(documentStmts, rdfWriter); /* Write the annotation RDF */ URI documentUri = documentRdfGenerator.getDocumentUri(view, documentMetadataHandler); for (Annotation annot : JCasUtil.select(view, Annotation.class)) { /* * by checking for a null type here we are checking that the * annotation is handled by the AnnotationDataExtractor * implementation. */ String type = annotationDataExtractor.getAnnotationType(annot); if (type != null) { Collection<? extends Statement> stmts = annotationRdfGenerator.generateRdf(annotationDataExtractor, annot, uriFactory, documentUri, view.getDocumentText()); writeStatements(stmts, rdfWriter); } } rdfWriter.endRDF(); } catch (FileNotFoundException e) { throw new AnalysisEngineProcessException(e); } catch (IOException e) { throw new AnalysisEngineProcessException(e); } catch (RDFHandlerException e) { throw new AnalysisEngineProcessException(e); } catch (CASException e) { throw new AnalysisEngineProcessException(e); } } public static AnalysisEngineDescription createDescription(TypeSystemDescription tsd, File outputDirectory, String outputFileInfix, RdfFormat format, boolean compressOutput, Class<? extends DocumentMetadataHandler> documentMetaDataExtractorClass, Class<? extends AnnotationDataExtractor> annotationDataExtractorClass, Class<? extends AnnotationRdfGenerator> annotationRdfGeneratorClass, Class<? extends DocumentRdfGenerator> documentRdfGeneratorClass, Class<? 
extends UriFactory> uriFactoryClass, String sourceViewName, String outputViewName) throws ResourceInitializationException { // @formatter:off return AnalysisEngineFactory.createEngineDescription(RdfSerialization_AE.class, tsd, PARAM_ANNOTATION_RDF_GENERATOR_CLASS, annotationRdfGeneratorClass.getName(), PARAM_DOCUMENT_RDF_GENERATOR_CLASS, documentRdfGeneratorClass.getName(), PARAM_ANNOTATION_DATA_EXTRACTOR_CLASS, annotationDataExtractorClass.getName(), PARAM_DOCUMENT_METADATA_HANDLER_CLASS, documentMetaDataExtractorClass.getName(), PARAM_URI_FACTORY_CLASS, uriFactoryClass.getName(), PARAM_RDF_FORMAT, format.name(), PARAM_COMPRESS_OUTPUT_FLAG, compressOutput, PARAM_OUTPUT_FILE_INFIX, outputFileInfix, PARAM_OUTPUT_DIRECTORY, outputDirectory.getAbsolutePath(), PARAM_SOURCE_VIEW_NAME, sourceViewName, PARAM_OUTPUT_VIEW_NAME, outputViewName ); // @formatter:on } public static AnalysisEngineDescription createDescription_SaveToSourceFileDirectory(TypeSystemDescription tsd, String outputFileInfix, RdfFormat format, boolean compressOutput, Class<? extends DocumentMetadataHandler> documentMetaDataExtractorClass, Class<? extends AnnotationDataExtractor> annotationDataExtractorClass, Class<? extends AnnotationRdfGenerator> annotationRdfGeneratorClass, Class<? extends DocumentRdfGenerator> documentRdfGeneratorClass, Class<? 
extends UriFactory> uriFactoryClass, String sourceViewName, String outputViewName) throws ResourceInitializationException { // @formatter:off return AnalysisEngineFactory.createEngineDescription(RdfSerialization_AE.class, tsd, PARAM_ANNOTATION_RDF_GENERATOR_CLASS, annotationRdfGeneratorClass.getName(), PARAM_DOCUMENT_RDF_GENERATOR_CLASS, documentRdfGeneratorClass.getName(), PARAM_ANNOTATION_DATA_EXTRACTOR_CLASS, annotationDataExtractorClass.getName(), PARAM_DOCUMENT_METADATA_HANDLER_CLASS, documentMetaDataExtractorClass.getName(), PARAM_URI_FACTORY_CLASS, uriFactoryClass.getName(), PARAM_RDF_FORMAT, format.name(), PARAM_COMPRESS_OUTPUT_FLAG, compressOutput, PARAM_OUTPUT_FILE_INFIX, outputFileInfix, PARAM_SOURCE_VIEW_NAME, sourceViewName, PARAM_OUTPUT_VIEW_NAME, outputViewName ); // @formatter:on } }
/*
 * Licensed to Crate under one or more contributor license agreements.
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership. Crate licenses this file
 * to you under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial
 * agreement.
 */

package io.crate.data;

import io.crate.concurrent.CompletableFutures;
import io.crate.exceptions.Exceptions;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.Executor;
import java.util.function.IntSupplier;

import static io.crate.concurrent.CompletableFutures.supplyAsync;

/**
 * BatchIterator implementations backed by multiple other BatchIterators.
 */
public final class CompositeBatchIterator {

    /**
     * Composite batchIterator that consumes each individual iterator fully before moving to the next.
     *
     * <p>NOTE: sorts the given array in place (loaded iterators first) before wrapping it.</p>
     */
    @SafeVarargs
    public static <T> BatchIterator<T> seqComposite(BatchIterator<T>... iterators) {
        switch (iterators.length) {
            case 0:
                // Empty composite: behaves like an empty, fully loaded iterator.
                return InMemoryBatchIterator.empty(null);
            case 1:
                // Nothing to compose.
                return iterators[0];
            default:
                // prefer loaded iterators over unloaded to improve performance in case only a subset of data is consumed
                Comparator<BatchIterator<T>> comparing = Comparator.comparing(BatchIterator::allLoaded);
                Arrays.sort(iterators, comparing.reversed());
                return new SeqCompositeBI<>(iterators);
        }
    }

    /**
     * Composite batchIterator that eagerly loads the individual iterators on `loadNext` multi-threaded
     */
    @SafeVarargs
    public static <T> BatchIterator<T> asyncComposite(Executor executor,
                                                      IntSupplier availableThreads,
                                                      BatchIterator<T>... iterators) {
        if (iterators.length == 1) {
            return iterators[0];
        }
        return new AsyncCompositeBI<>(executor, availableThreads, iterators);
    }

    /**
     * Shared state/behaviour for composite iterators: delegates element access to the iterator
     * at {@code idx} and fans out lifecycle calls (moveToStart/close/kill) to all children.
     */
    private abstract static class AbstractCompositeBI<T> implements BatchIterator<T> {

        protected final BatchIterator<T>[] iterators;
        // Index of the child iterator currently being consumed.
        protected int idx = 0;

        AbstractCompositeBI(BatchIterator<T>[] iterators) {
            assert iterators.length > 0 : "Must have at least 1 iterator";
            this.iterators = iterators;
        }

        @Override
        public T currentElement() {
            return iterators[idx].currentElement();
        }

        @Override
        public void moveToStart() {
            for (BatchIterator<T> iterator : iterators) {
                iterator.moveToStart();
            }
            idx = 0;
        }

        @Override
        public void close() {
            for (BatchIterator<T> iterator : iterators) {
                iterator.close();
            }
        }

        @Override
        public boolean allLoaded() {
            // Loaded only when every child is loaded.
            for (BatchIterator<T> iterator : iterators) {
                if (iterator.allLoaded() == false) {
                    return false;
                }
            }
            return true;
        }

        @Override
        public void kill(@Nonnull Throwable throwable) {
            for (BatchIterator<T> iterator : iterators) {
                iterator.kill(throwable);
            }
        }

        @Override
        public boolean hasLazyResultSet() {
            // Lazy if any child is lazy.
            for (BatchIterator<T> iterator : iterators) {
                if (iterator.hasLazyResultSet()) {
                    return true;
                }
            }
            return false;
        }
    }

    /**
     * Sequential composite: exhausts child {@code idx} (including all its batches) before
     * advancing to child {@code idx + 1}.
     */
    private static class SeqCompositeBI<T> extends AbstractCompositeBI<T> {

        SeqCompositeBI(BatchIterator<T>[] iterators) {
            super(iterators);
        }

        @Override
        public boolean moveNext() {
            while (idx < iterators.length) {
                BatchIterator<T> iterator = iterators[idx];
                if (iterator.moveNext()) {
                    return true;
                }
                if (iterator.allLoaded() == false) {
                    // Current child needs another batch first; stay on it and report "no row yet".
                    return false;
                }
                idx++;
            }
            // All children exhausted; reset so a moveToStart/re-consume starts at the first child.
            idx = 0;
            return false;
        }

        @Override
        public CompletionStage<?> loadNextBatch() throws Exception {
            // Load the first child that still has batches pending.
            for (BatchIterator<T> iterator : iterators) {
                if (iterator.allLoaded()) {
                    continue;
                }
                return iterator.loadNextBatch();
            }
            throw new IllegalStateException("BatchIterator already fully loaded");
        }
    }

    /**
     * Async composite: {@code loadNextBatch} triggers batch loads on all unloaded children,
     * striped across up to {@code availableThreads} tasks on the given executor.
     */
    private static class AsyncCompositeBI<T> extends AbstractCompositeBI<T> {

        private final Executor executor;
        private final IntSupplier availableThreads;

        AsyncCompositeBI(Executor executor, IntSupplier availableThreads, BatchIterator<T>[] iterators) {
            super(iterators);
            this.executor = executor;
            this.availableThreads = availableThreads;
        }

        @Override
        public boolean moveNext() {
            // Unlike SeqCompositeBI this skips past children with pending batches;
            // loading is driven eagerly by loadNextBatch instead.
            while (idx < iterators.length) {
                BatchIterator<T> iterator = iterators[idx];
                if (iterator.moveNext()) {
                    return true;
                }
                idx++;
            }
            idx = 0;
            return false;
        }

        private int numIteratorsActive() {
            int activeIts = 0;
            for (var it : iterators) {
                if (!it.allLoaded()) {
                    activeIts++;
                }
            }
            return activeIts;
        }

        @Override
        public CompletionStage<?> loadNextBatch() throws Exception {
            if (allLoaded()) {
                throw new IllegalStateException("BatchIterator already loaded");
            }
            int activeIts = numIteratorsActive();
            int numThreads = Math.max(1, availableThreads.getAsInt());
            final int usedThreads = Math.min(numThreads, activeIts);
            ArrayList<CompletableFuture<CompletableFuture<?>>> nestedFutures = new ArrayList<>(usedThreads);
            for (int t = 0; t < usedThreads; t++) {
                final int thread = t;
                nestedFutures.add(supplyAsync(() -> {
                    ArrayList<CompletableFuture<?>> futures = new ArrayList<>();
                    for (int i = 0; i < iterators.length; i++) {
                        var it = iterators[i];
                        // Stripe children across tasks: task `thread` handles child i
                        // iff i % usedThreads == thread.
                        if (it.allLoaded() || i % usedThreads != thread) {
                            continue;
                        }
                        try {
                            futures.add(it.loadNextBatch().toCompletableFuture());
                        } catch (Exception e) {
                            return Exceptions.rethrowRuntimeException(e);
                        }
                    }
                    // Inner future completes when every child handled by this task is loaded.
                    return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
                }, executor));
            }
            // Outer stage completes when all striped tasks AND all their child loads finish.
            return CompletableFutures.allAsList(nestedFutures)
                .thenCompose(innerFutures -> CompletableFuture.allOf(innerFutures.toArray(new CompletableFuture[0])));
        }
    }
}
package course.labs.notificationslab;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.ArrayList;

import android.app.Activity;
import android.app.Fragment;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.widget.RemoteViews;
import android.widget.Toast;

/**
 * Retained (configuration-surviving) fragment that simulates downloading Twitter feeds
 * in a background AsyncTask, persists them to a file, and notifies the user — via a
 * status-bar Notification when MainActivity is not in the foreground, via a Toast otherwise.
 */
public class DownloaderTaskFragment extends Fragment {

	// Callback into the hosting Activity; null while detached.
	private DownloadFinishedListener mCallback;
	// Application context, safe to hold across Activity re-creation.
	private Context mContext;
	private static final int MY_NOTIFICATION_ID = 11151990;
	// Vibration pattern for the notification: delay, on, off, on (ms).
	private final long[] mVibratePattern = { 0, 200, 200, 300 };

	@SuppressWarnings("unused")
	private static final String TAG = "Lab-Notifications";

	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// Preserve across reconfigurations so the running download is not restarted.
		setRetainInstance(true);

		// "Download" the raw-resource feeds identified by the IDs passed in our arguments.
		DownloaderTask downloaderTask = new DownloaderTask();
		ArrayList<Integer> resourceIds = getArguments()
				.getIntegerArrayList(MainActivity.TAG_FRIEND_RES_IDS);
		downloaderTask.execute(resourceIds);
	}

	// Assign current hosting Activity to mCallback
	// Store application context for use by downloadTweets()
	@Override
	public void onAttach(Activity activity) {
		super.onAttach(activity);
		mContext = activity.getApplicationContext();

		// The hosting activity must implement the callback interface.
		try {
			mCallback = (DownloadFinishedListener) activity;
		} catch (ClassCastException e) {
			throw new ClassCastException(activity.toString()
					+ " must implement DownloadFinishedListener");
		}
	}

	// Null out mCallback so a detached fragment never calls into a dead Activity.
	@Override
	public void onDetach() {
		super.onDetach();
		mCallback = null;
	}

	/**
	 * Background task that "downloads" the feeds and hands the result back to the
	 * hosting Activity through the DownloadFinishedListener interface.
	 */
	public class DownloaderTask extends AsyncTask<ArrayList<Integer>, Integer, String[]> {

		@Override
		protected String[] doInBackground(ArrayList<Integer>... params) {
			ArrayList<Integer> idList = params[0];
			Integer[] resourceIDS = idList.toArray(new Integer[idList.size()]);
			return downloadTweets(resourceIDS);
		}

		@Override
		protected void onPostExecute(String[] result) {
			// mCallback is null while the fragment is detached; skip delivery then.
			if (mCallback != null) {
				mCallback.notifyDataRefreshed(result);
			}
		}
	}

	/**
	 * Simulates downloading Twitter data from the network by reading the given raw
	 * resources (with an artificial delay per feed), saving them to a file on success,
	 * and always notifying the user of the outcome.
	 *
	 * @return one string of feed data per resource ID (entries may be null on failure)
	 */
	private String[] downloadTweets(Integer resourceIDS[]) {
		final int simulatedDelay = 2000;
		String[] feeds = new String[resourceIDS.length];
		boolean downLoadCompleted = false;

		try {
			for (int idx = 0; idx < resourceIDS.length; idx++) {
				// Pretend downloading takes a long time.
				try {
					Thread.sleep(simulatedDelay);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}

				InputStream inputStream = mContext.getResources().openRawResource(
						resourceIDS[idx]);
				BufferedReader in = new BufferedReader(new InputStreamReader(
						inputStream));
				// Fix: close the reader even if readLine() throws (was leaked on error).
				try {
					String readLine;
					StringBuilder buf = new StringBuilder();
					while ((readLine = in.readLine()) != null) {
						buf.append(readLine);
					}
					feeds[idx] = buf.toString();
				} finally {
					in.close();
				}
			}
			downLoadCompleted = true;
			saveTweetsToFile(feeds);
		} catch (IOException e) {
			e.printStackTrace();
		}

		// Notify user that downloading has finished (success or failure).
		notify(downLoadCompleted);
		return feeds;
	}

	/**
	 * Notifies the user that the tweet downloads are complete. Sends an ordered
	 * broadcast to MainActivity's BroadcastReceiver; if MainActivity is not alive in
	 * the foreground (result code != IS_ALIVE) a status-bar Notification is posted
	 * that restarts MainActivity when tapped, otherwise only a Toast is shown.
	 */
	private void notify(final boolean success) {
		final Intent restartMainActivityIntent = new Intent(mContext,
				MainActivity.class);
		restartMainActivityIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

		mContext.sendOrderedBroadcast(new Intent(
				MainActivity.DATA_REFRESHED_ACTION), null,
				new BroadcastReceiver() {

					final String failMsg = mContext
							.getString(R.string.download_failed_string);
					final String successMsg = mContext
							.getString(R.string.download_succes_string);
					final String notificationSentMsg = mContext
							.getString(R.string.notification_sent_string);

					@Override
					public void onReceive(Context context, Intent intent) {
						// MainActivity sets IS_ALIVE as result code when it receives
						// the broadcast in the foreground.
						if (getResultCode() != MainActivity.IS_ALIVE) {
							// Tapping the notification (re)starts MainActivity.
							PendingIntent pendingIntent = PendingIntent.getActivity(
									mContext, 0, restartMainActivityIntent,
									PendingIntent.FLAG_UPDATE_CURRENT);

							// Custom layout from res/layout/custom_notification.xml.
							RemoteViews contentView = new RemoteViews(
									mContext.getPackageName(),
									R.layout.custom_notification);
							contentView.setTextViewText(R.id.text,
									success ? successMsg : failMsg);

							Notification.Builder notificationBuilder = new Notification.Builder(
									mContext)
									.setSmallIcon(
											android.R.drawable.stat_sys_warning)
									.setAutoCancel(true)
									.setContentIntent(pendingIntent)
									.setVibrate(mVibratePattern)
									.setContent(contentView);

							NotificationManager notificationManager = (NotificationManager) mContext
									.getSystemService(Context.NOTIFICATION_SERVICE);
							notificationManager.notify(MY_NOTIFICATION_ID,
									notificationBuilder.build());

							Toast.makeText(mContext, notificationSentMsg,
									Toast.LENGTH_LONG).show();
						} else {
							Toast.makeText(mContext,
									success ? successMsg : failMsg,
									Toast.LENGTH_LONG).show();
						}
					}
				}, null, 0, null, null);
	}

	// Saves the downloaded tweets to app-private storage (MainActivity.TWEET_FILENAME).
	private void saveTweetsToFile(String[] result) {
		PrintWriter writer = null;
		try {
			FileOutputStream fos = mContext.openFileOutput(
					MainActivity.TWEET_FILENAME, Context.MODE_PRIVATE);
			writer = new PrintWriter(new BufferedWriter(
					new OutputStreamWriter(fos)));
			for (String s : result) {
				writer.println(s);
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (null != writer) {
				writer.close();
			}
		}
	}
}
package com.adunite; import org.apache.cordova.*; import android.app.Activity; import android.util.DisplayMetrics; import android.content.Context; import android.telephony.TelephonyManager; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.Timer; import java.util.TimerTask; import java.util.Map; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.RelativeLayout; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.PackageInfo; import android.net.Uri; import android.net.wifi.WifiInfo; import android.net.wifi.WifiManager; import android.text.format.Formatter; import com.google.android.gms.ads.AdListener; import com.google.android.gms.ads.AdRequest; import com.applovin.adview.AppLovinInterstitialAd; import com.applovin.adview.AppLovinInterstitialAdDialog; import com.applovin.sdk.AppLovinAd; import com.applovin.sdk.AppLovinAdClickListener; import com.applovin.sdk.AppLovinAdDisplayListener; import com.applovin.sdk.AppLovinAdLoadListener; import com.applovin.sdk.AppLovinSdk; import com.jirbo.adcolony.AdColony; import com.jirbo.adcolony.AdColonyAd; import com.jirbo.adcolony.AdColonyAdAvailabilityListener; import com.jirbo.adcolony.AdColonyAdListener; import com.jirbo.adcolony.AdColonyVideoAd; import com.chartboost.sdk.CBLocation; import com.chartboost.sdk.Chartboost; import com.chartboost.sdk.ChartboostDelegate; import com.chartboost.sdk.Model.CBError; public class Adunite extends CordovaPlugin { private static final String LOG_TAG = "Adunite"; private CallbackContext _aduniteCallbackContext; private com.google.android.gms.ads.InterstitialAd _admobInterstitialAd; private AppLovinInterstitialAdDialog _adDialog; private volatile boolean _applovinReady = false; @Override public boolean execute(String action, JSONArray data, CallbackContext callbackContext) throws JSONException { if (action.equals("showAds")) { 
return showAds(callbackContext, data); } else if (action.equals("init")) { // all ads event callback goes to this callback _aduniteCallbackContext = callbackContext; initAdunite(callbackContext, data.optBoolean(0), data.optString(1), data.optString(2)); PluginResult result = new PluginResult(PluginResult.Status.OK, new JSONObject()); result.setKeepCallback(true); _aduniteCallbackContext.sendPluginResult(result); return true; } else if (action.equals("loadAds")) { return loadAds(callbackContext, data); } else { return false; } } private void initAdunite(CallbackContext callbackContext, final boolean enableApplovin, final String adcolonyAppAndZoneId, final String chartboostAppIdAndSignature) { // some sdk requires init before using // applovin if (enableApplovin) { Log.w(LOG_TAG, "applovin ads is enabled."); _adDialog = AppLovinInterstitialAd.create(AppLovinSdk.getInstance(getActivity()), getActivity()); MyAppLovinListener myAppLovinListener = new MyAppLovinListener(); _adDialog.setAdDisplayListener(myAppLovinListener); _adDialog.setAdLoadListener(myAppLovinListener); _adDialog.setAdClickListener(myAppLovinListener); AppLovinSdk.initializeSdk(getActivity()); // start a polling thread to check if ads is ready to show Thread checkAppLovinThread = new Thread(new Runnable() { public void run() { while (true) { if (_applovinReady == false) { boolean result = _adDialog.isAdReadyToDisplay(); Log.d(LOG_TAG, "checking applovin ready state = " + result); if (result) { sendAdsEventToJs("applovin", "READY", ""); _applovinReady = true; } } try { Thread.sleep(3000); } catch (InterruptedException e) { e.printStackTrace(); } } }}); checkAppLovinThread.start(); } // adcolony if ((adcolonyAppAndZoneId != null) && (!"".equals(adcolonyAppAndZoneId)) && (!"null".equals(adcolonyAppAndZoneId))) { Log.w(LOG_TAG, "adcolony ads is enabled. 
appId_zoneId=" + adcolonyAppAndZoneId); String[] tokens = adcolonyAppAndZoneId.split("_"); AdColony.configure(getActivity(), "version:1.0,store:google", tokens[0] /* appid */, tokens[1] /* zoneid */); AdColony.addAdAvailabilityListener(new AdColonyListener()); } // chartboost if ((chartboostAppIdAndSignature != null) && (!"".equals(chartboostAppIdAndSignature)) && (!"null".equals(chartboostAppIdAndSignature))) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Log.w(LOG_TAG, "chartboost ads is enabled. appId_signatureId=" + chartboostAppIdAndSignature); String[] tokens = chartboostAppIdAndSignature.split("_"); Chartboost.setAutoCacheAds(false); Chartboost.startWithAppId(getActivity(), tokens[0] /* appid */, tokens[1] /* signature */); Chartboost.setDelegate(new MyChartboostListener()); Chartboost.onCreate(getActivity()); Chartboost.onStart(getActivity()); } }); } } private boolean loadAds(CallbackContext callbackContext, JSONArray data) { final String networkName = data.optString(0); final String pid = data.optString(1); if ("admob".equals(networkName)) { loadAdmobAds(pid); } else if ("applovin".equals(networkName)) { // no op } else if ("adcolony".equals(networkName)) { // no op } else if ("cb".equals(networkName)) { loadChartboostAds(); } else { Log.e(LOG_TAG, "adnetwork not supported: " + networkName); } return true; } private boolean showAds(CallbackContext callbackContext, JSONArray data) { final String networkName = data.optString(0); if ("admob".equals(networkName)) { showAdmobAds(callbackContext); } else if ("applovin".equals(networkName)) { showApplovinAds(callbackContext); } else if ("adcolony".equals(networkName)) { showAdcolonyAds(callbackContext); } else if ("cb".equals(networkName)) { showChartboostAds(callbackContext); } else { Log.e(LOG_TAG, "adnetwork not supported: " + networkName); } PluginResult result = new PluginResult(PluginResult.Status.OK, networkName); callbackContext.sendPluginResult(result); return true; } // 
=========== END of public facing methods ================ // Admob private void loadAdmobAds(final String pid) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Log.i(LOG_TAG, "Trying to load admob ads, pid=" + pid); _admobInterstitialAd = new com.google.android.gms.ads.InterstitialAd(getActivity()); _admobInterstitialAd.setAdUnitId(pid); AdRequest adRequest = new AdRequest.Builder().build(); _admobInterstitialAd.setAdListener(new AdmobAdListener()); _admobInterstitialAd.loadAd(adRequest); } }); } private void showAdmobAds(final CallbackContext callbackContext) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Log.i(LOG_TAG, "Trying to show admob ads"); if (_admobInterstitialAd != null) { _admobInterstitialAd.show(); } else { Log.e(LOG_TAG, "abmob interstitial not ready, cannot show"); PluginResult result = new PluginResult(PluginResult.Status.ERROR, "admob interstitial not ready, cannot show"); callbackContext.sendPluginResult(result); } } }); } // applovin private void showApplovinAds(final CallbackContext callbackContext) { if (_adDialog.isAdReadyToDisplay()) { Log.i(LOG_TAG, "Trying to show applovin ads"); // NOTE: only after we call show, it would trigger loaded event // this is stupid, makes the logic hard to implement _adDialog.show(); } else { Log.e(LOG_TAG, "applovin ads not ready, cannot show"); PluginResult result = new PluginResult(PluginResult.Status.ERROR, "applovin ads not ready, cannot show"); callbackContext.sendPluginResult(result); } } // adcolony private void showAdcolonyAds(final CallbackContext callbackContext) { Log.i(LOG_TAG, "Trying to show adcolony ads"); AdColonyVideoAd ad = new AdColonyVideoAd().withListener(new AdColonyListener()); ad.show(); } // chartboost private void loadChartboostAds() { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Log.i(LOG_TAG, "Trying to load Chartboost ads"); Chartboost.cacheInterstitial(CBLocation.LOCATION_DEFAULT); } }); } 
private void showChartboostAds(final CallbackContext callbackContext) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Log.i(LOG_TAG, "Trying to show chartboost ads"); if (Chartboost.hasInterstitial(CBLocation.LOCATION_DEFAULT)) { Chartboost.showInterstitial(CBLocation.LOCATION_DEFAULT); } else { Log.e(LOG_TAG, "Chartboost interstitial not ready, cannot show"); PluginResult result = new PluginResult(PluginResult.Status.ERROR, "chartboost interstitial not ready, cannot show"); callbackContext.sendPluginResult(result); } } }); } private Activity getActivity() { return cordova.getActivity(); } private void sendAdsEventToJs(String networkName, String eventName, String eventDetail) { Log.w(LOG_TAG, String.format("Emit AdsEvent: %s - %s - %s", networkName, eventName, eventDetail)); PluginResult result = new PluginResult(PluginResult.Status.OK, buildAdsEvent(networkName, eventName, eventDetail)); result.setKeepCallback(true); if (_aduniteCallbackContext != null) { _aduniteCallbackContext.sendPluginResult(result); } else { Log.e(LOG_TAG, String.format("_aduniteCallbackContext is null, cannot send result back, network=%s event=%s", networkName, eventName)); } } private JSONObject buildAdsEvent(String networkName, String eventName, String eventDetail) { JSONObject obj = new JSONObject(); try { obj.put("network_name", networkName); obj.put("event_name", eventName); obj.put("event_detail", eventDetail); } catch (JSONException e) { Log.e(LOG_TAG, e.getMessage(), e); return null; } return obj; } private class AdmobAdListener extends AdListener { @Override public void onAdClosed() { sendAdsEventToJs("admob", "FINISH", ""); } @Override public void onAdFailedToLoad(int errorCode) { sendAdsEventToJs("admob", "LOADERROR", String.valueOf(errorCode)); } @Override public void onAdLeftApplication() { sendAdsEventToJs("admob", "CLICK", ""); } @Override public void onAdOpened() { sendAdsEventToJs("admob", "START", ""); } @Override public void onAdLoaded() { 
sendAdsEventToJs("admob", "READY", ""); } } private class MyAppLovinListener implements AppLovinAdDisplayListener, AppLovinAdLoadListener, AppLovinAdClickListener { @Override public void adDisplayed(AppLovinAd appLovinAd) { sendAdsEventToJs("applovin", "START", ""); _applovinReady = false; } @Override public void adHidden(AppLovinAd appLovinAd) { sendAdsEventToJs("applovin", "FINISH", ""); } @Override public void adReceived(AppLovinAd appLovinAd) { // This will actually happen after ads is shown, so not useful Log.i(LOG_TAG, "applovin got adReceived event"); // sendAdsEventToJs("applovin", "READY", String.valueOf(appLovinAd.getAdIdNumber())); } @Override public void failedToReceiveAd(int errorCode) { sendAdsEventToJs("applovin", "LOADERROR", String.valueOf(errorCode)); } @Override public void adClicked(AppLovinAd appLovinAd) { sendAdsEventToJs("applovin", "CLICK", ""); } } private class AdColonyListener implements AdColonyAdAvailabilityListener, AdColonyAdListener { @Override public void onAdColonyAdAvailabilityChange(boolean b, String s) { Log.i(LOG_TAG, "adcolony AdAvailabilityChange " + b + " " + s); if (b) { sendAdsEventToJs("adcolony", "READY", ""); } } @Override public void onAdColonyAdAttemptFinished( AdColonyAd ad ) { // Can use the ad object to determine information about the ad attempt: // ad.shown(); // ad.notShown(); // ad.canceled(); // ad.noFill(); // ad.skipped(); if (ad.shown()) { sendAdsEventToJs("adcolony", "FINISH", ""); } } @Override public void onAdColonyAdStarted(AdColonyAd ad) { sendAdsEventToJs("adcolony", "START", ""); } } private class MyChartboostListener extends ChartboostDelegate { @Override public void didDisplayInterstitial(String location) { sendAdsEventToJs("cb", "START", location); } // when ads is loaded, this will be called @Override public void didCacheInterstitial(String location) { sendAdsEventToJs("cb", "READY", location); } @Override public void didFailToLoadInterstitial(String location, CBError.CBImpressionError error) { 
sendAdsEventToJs("cb", "LOADERROR", String.valueOf(error)); } @Override public void didDismissInterstitial(String location) { sendAdsEventToJs("cb", "FINISH", location); } @Override public void didClickInterstitial(String location) { sendAdsEventToJs("cb", "CLICK", location); } } }
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;

import static com.google.common.base.Preconditions.checkState;
import static com.google.devtools.build.lib.util.Preconditions.checkNotNull;
import static java.nio.charset.StandardCharsets.ISO_8859_1;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.BaseSpawn;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.ParameterFile;
import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnActionContext;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.CommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.ImmutableIterable;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;

/**
 * Action for Java header compilation, to be used if --java_header_compilation is enabled.
 *
 * <p>The header compiler consumes the inputs of a java compilation, and produces an interface jar
 * that can be used as a compile-time jar by upstream targets. The header interface jar is
 * equivalent to the output of ijar, but unlike ijar the header compiler operates directly on Java
 * source files instead post-processing the class outputs of the compilation. Compiling the
 * interface jar from source moves javac off the build's critical path.
 *
 * <p>The implementation of the header compiler tool can be found under {@code
 * //src/java_tools/buildjar/java/com/google/devtools/build/java/turbine}.
 */
public class JavaHeaderCompileAction extends SpawnAction {

  // Mixed into computeKey() so that unrelated cache entries are invalidated when
  // this action's key computation changes.
  private static final String GUID = "952db158-2654-4ced-87e5-4646d50523cf";

  private static final ResourceSet LOCAL_RESOURCES =
      ResourceSet.createWithRamCpuIo(/*memoryMb=*/ 750.0, /*cpuUsage=*/ 0.5, /*ioUsage=*/ 0.0);

  // Inputs and command line for the optimistic direct-classpath attempt; the
  // transitive equivalents live in the SpawnAction superclass.
  private final Iterable<Artifact> directInputs;
  @Nullable private final CommandLine directCommandLine;

  /** The command line for a direct classpath compilation, or {@code null} if disabled. */
  @VisibleForTesting
  @Nullable
  public CommandLine directCommandLine() {
    return directCommandLine;
  }

  /**
   * Constructs an action to compile a set of Java source files to a header interface jar.
   *
   * @param owner the action owner, typically a java_* RuleConfiguredTarget
   * @param tools the set of files comprising the tool that creates the header interface jar
   * @param directInputs the set of direct input artifacts of the compile action
   * @param transitiveInputs the set of transitive input artifacts of the compile action
   * @param outputs the outputs of the action
   * @param directCommandLine the direct command line arguments for the java header compiler
   * @param transitiveCommandLine the transitive command line arguments for the java header
   *     compiler
   * @param progressMessage the message printed during the progression of the build
   */
  protected JavaHeaderCompileAction(
      ActionOwner owner,
      Iterable<Artifact> tools,
      Iterable<Artifact> directInputs,
      Iterable<Artifact> transitiveInputs,
      Iterable<Artifact> outputs,
      CommandLine directCommandLine,
      CommandLine transitiveCommandLine,
      String progressMessage) {
    // The superclass is configured with the TRANSITIVE inputs/command line; the
    // direct variants are kept in this class for the optimistic first attempt.
    super(
        owner,
        tools,
        transitiveInputs,
        outputs,
        LOCAL_RESOURCES,
        transitiveCommandLine,
        false,
        // TODO(#3320): This is missing the config's action environment.
        JavaCompileAction.UTF8_ACTION_ENVIRONMENT,
        progressMessage,
        "Turbine");
    this.directInputs = checkNotNull(directInputs);
    this.directCommandLine = checkNotNull(directCommandLine);
  }

  @Override
  protected String computeKey() {
    // Extends the superclass key (which covers the transitive command line) with
    // the direct command line, since that also affects the action's output.
    return new Fingerprint()
        .addString(GUID)
        .addString(super.computeKey())
        .addStrings(directCommandLine.arguments())
        .hexDigestAndReset();
  }

  @Override
  protected void internalExecute(ActionExecutionContext actionExecutionContext)
      throws ExecException, InterruptedException {
    SpawnActionContext context = getContext(actionExecutionContext);
    try {
      context.exec(getDirectSpawn(), actionExecutionContext);
    } catch (ExecException e) {
      // if the direct input spawn failed, try again with transitive inputs to produce
      // better messages
      context.exec(getSpawn(actionExecutionContext.getClientEnv()), actionExecutionContext);
      // The compilation should never fail with direct deps but succeed with transitive inputs
      // unless it failed due to a strict deps error, in which case falling back to the
      // transitive classpath may allow it to succeed (Strict Java Deps errors are reported by
      // javac, not turbine).
    }
  }

  // Builds the optimistic spawn: the direct command line run against only the
  // direct inputs (the superclass spawn uses the transitive variants).
  private final Spawn getDirectSpawn() {
    return new BaseSpawn(
        ImmutableList.copyOf(directCommandLine.arguments()),
        ImmutableMap.<String, String>of() /*environment*/,
        ImmutableMap.<String, String>of() /*executionInfo*/,
        this,
        LOCAL_RESOURCES) {
      @Override
      public Iterable<? extends ActionInput> getInputFiles() {
        return directInputs;
      }
    };
  }

  /** Builder class to construct Java header compilation actions. */
  public static class Builder {

    private final RuleContext ruleContext;

    private Artifact outputJar;
    @Nullable private Artifact outputDepsProto;
    private ImmutableSet<Artifact> sourceFiles = ImmutableSet.of();
    private final Collection<Artifact> sourceJars = new ArrayList<>();
    private NestedSet<Artifact> classpathEntries =
        NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private ImmutableIterable<Artifact> bootclasspathEntries =
        ImmutableIterable.from(ImmutableList.<Artifact>of());
    @Nullable private String ruleKind;
    @Nullable private Label targetLabel;
    private PathFragment tempDirectory;
    private BuildConfiguration.StrictDepsMode strictJavaDeps =
        BuildConfiguration.StrictDepsMode.OFF;
    private NestedSet<Artifact> directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private NestedSet<Artifact> compileTimeDependencyArtifacts =
        NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private ImmutableList<String> javacOpts;
    private NestedSet<Artifact> processorPath = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private final List<String> processorNames = new ArrayList<>();
    private final List<String> processorFlags = new ArrayList<>();
    private NestedSet<Artifact> javabaseInputs;
    private Artifact javacJar;
    private NestedSet<Artifact> toolsJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);

    public Builder(RuleContext ruleContext) {
      this.ruleContext = ruleContext;
    }

    /** Sets the output jdeps file. */
    public Builder setOutputDepsProto(@Nullable Artifact outputDepsProto) {
      this.outputDepsProto = outputDepsProto;
      return this;
    }

    /** Sets the direct dependency artifacts. */
    public Builder setDirectJars(NestedSet<Artifact> directJars) {
      checkNotNull(directJars, "directJars must not be null");
      this.directJars = directJars;
      return this;
    }

    /** Sets the .jdeps artifacts for direct dependencies. */
    public Builder setCompileTimeDependencyArtifacts(NestedSet<Artifact> dependencyArtifacts) {
      checkNotNull(dependencyArtifacts, "dependencyArtifacts must not be null");
      this.compileTimeDependencyArtifacts = dependencyArtifacts;
      return this;
    }

    /** Sets Java compiler flags. */
    public Builder setJavacOpts(ImmutableList<String> javacOpts) {
      checkNotNull(javacOpts, "javacOpts must not be null");
      this.javacOpts = javacOpts;
      return this;
    }

    /** Sets the output jar. */
    public Builder setOutputJar(Artifact outputJar) {
      checkNotNull(outputJar, "outputJar must not be null");
      this.outputJar = outputJar;
      return this;
    }

    /** Adds Java source files to compile. */
    public Builder setSourceFiles(ImmutableSet<Artifact> sourceFiles) {
      checkNotNull(sourceFiles, "sourceFiles must not be null");
      this.sourceFiles = sourceFiles;
      return this;
    }

    /** Adds a jar archive of Java sources to compile. */
    public Builder addSourceJars(Collection<Artifact> sourceJars) {
      checkNotNull(sourceJars, "sourceJars must not be null");
      this.sourceJars.addAll(sourceJars);
      return this;
    }

    /** Sets the compilation classpath entries. */
    public Builder setClasspathEntries(NestedSet<Artifact> classpathEntries) {
      checkNotNull(classpathEntries, "classpathEntries must not be null");
      this.classpathEntries = classpathEntries;
      return this;
    }

    /** Sets the compilation bootclasspath entries. */
    public Builder setBootclasspathEntries(ImmutableIterable<Artifact> bootclasspathEntries) {
      checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null");
      this.bootclasspathEntries = bootclasspathEntries;
      return this;
    }

    /** Sets the annotation processors classpath entries. */
    public Builder setProcessorPaths(NestedSet<Artifact> processorPaths) {
      checkNotNull(processorPaths, "processorPaths must not be null");
      this.processorPath = processorPaths;
      return this;
    }

    /** Sets the fully-qualified class names of annotation processors to run. */
    public Builder addProcessorNames(Collection<String> processorNames) {
      checkNotNull(processorNames, "processorNames must not be null");
      this.processorNames.addAll(processorNames);
      return this;
    }

    /** Sets annotation processor flags to pass to javac. */
    public Builder addProcessorFlags(Collection<String> processorFlags) {
      checkNotNull(processorFlags, "processorFlags must not be null");
      this.processorFlags.addAll(processorFlags);
      return this;
    }

    /** Sets the kind of the build rule being compiled (e.g. {@code java_library}). */
    public Builder setRuleKind(@Nullable String ruleKind) {
      this.ruleKind = ruleKind;
      return this;
    }

    /** Sets the label of the target being compiled. */
    public Builder setTargetLabel(@Nullable Label targetLabel) {
      this.targetLabel = targetLabel;
      return this;
    }

    /**
     * Sets the path to a temporary directory, e.g. for extracting sourcejar entries to before
     * compilation.
     */
    public Builder setTempDirectory(PathFragment tempDirectory) {
      checkNotNull(tempDirectory, "tempDirectory must not be null");
      this.tempDirectory = tempDirectory;
      return this;
    }

    /** Sets the Strict Java Deps mode. */
    public Builder setStrictJavaDeps(BuildConfiguration.StrictDepsMode strictJavaDeps) {
      checkNotNull(strictJavaDeps, "strictJavaDeps must not be null");
      this.strictJavaDeps = strictJavaDeps;
      return this;
    }

    /** Sets the javabase inputs. */
    public Builder setJavaBaseInputs(NestedSet<Artifact> javabaseInputs) {
      checkNotNull(javabaseInputs, "javabaseInputs must not be null");
      this.javabaseInputs = javabaseInputs;
      return this;
    }

    /** Sets the javac jar. */
    public Builder setJavacJar(Artifact javacJar) {
      checkNotNull(javacJar, "javacJar must not be null");
      this.javacJar = javacJar;
      return this;
    }

    /** Sets the tools jars. */
    public Builder setToolsJars(NestedSet<Artifact> toolsJars) {
      checkNotNull(toolsJars, "toolsJars must not be null");
      this.toolsJars = toolsJars;
      return this;
    }

    /** Builds and registers the {@link JavaHeaderCompileAction} for a header compilation. */
    public void build(JavaToolchainProvider javaToolchain) {
      ruleContext.registerAction(buildInternal(javaToolchain));
    }

    // Chooses between three strategies and returns the action(s) to register:
    // (1) a plain turbine SpawnAction, (2) a javac-turbine SpawnAction when
    // annotation processing is required, or (3) the two-tiered
    // JavaHeaderCompileAction (direct attempt + transitive fallback).
    private Action[] buildInternal(JavaToolchainProvider javaToolchain) {
      checkNotNull(outputDepsProto, "outputDepsProto must not be null");
      checkNotNull(sourceFiles, "sourceFiles must not be null");
      checkNotNull(sourceJars, "sourceJars must not be null");
      checkNotNull(classpathEntries, "classpathEntries must not be null");
      checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null");
      checkNotNull(tempDirectory, "tempDirectory must not be null");
      checkNotNull(strictJavaDeps, "strictJavaDeps must not be null");
      checkNotNull(directJars, "directJars must not be null");
      checkNotNull(
          compileTimeDependencyArtifacts, "compileTimeDependencyArtifacts must not be null");
      checkNotNull(javacOpts, "javacOpts must not be null");
      checkNotNull(processorPath, "processorPath must not be null");
      checkNotNull(processorNames, "processorNames must not be null");

      // Invariant: if strictJavaDeps is OFF, then directJars and
      // dependencyArtifacts are ignored
      if (strictJavaDeps == BuildConfiguration.StrictDepsMode.OFF) {
        directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
        compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
      }

      // The compilation uses API-generating annotation processors and has to fall back to
      // javac-turbine.
      boolean requiresAnnotationProcessing = !processorNames.isEmpty();

      NestedSet<Artifact> tools =
          NestedSetBuilder.<Artifact>stableOrder()
              .add(javacJar)
              .add(javaToolchain.getHeaderCompiler())
              .addTransitive(toolsJars)
              .build();
      ImmutableList<Artifact> outputs = ImmutableList.of(outputJar, outputDepsProto);
      // Inputs common to every strategy; classpath/processor inputs are added per-strategy.
      NestedSet<Artifact> baseInputs =
          NestedSetBuilder.<Artifact>stableOrder()
              .addTransitive(javabaseInputs)
              .addAll(bootclasspathEntries)
              .addAll(sourceJars)
              .addAll(sourceFiles)
              .addTransitive(tools)
              .build();

      boolean noFallback =
          ruleContext.getFragment(JavaConfiguration.class).headerCompilationDisableJavacFallback();
      // The action doesn't require annotation processing and either javac-turbine fallback is
      // disabled, or the action doesn't distinguish between direct and transitive deps, so
      // use a plain SpawnAction to invoke turbine.
      if ((noFallback || directJars.isEmpty()) && !requiresAnnotationProcessing) {
        SpawnAction.Builder builder = new SpawnAction.Builder();
        NestedSet<Artifact> classpath;
        if (!directJars.isEmpty() || classpathEntries.isEmpty()) {
          classpath = directJars;
        } else {
          classpath = classpathEntries;
          // Transitive classpath actions may exceed the command line length limit.
          builder.alwaysUseParameterFile(ParameterFileType.UNQUOTED);
        }
        CustomCommandLine.Builder commandLine =
            baseCommandLine(CustomCommandLine.builder(), classpath);
        if (noFallback) {
          commandLine.add("--nojavac_fallback");
        }
        return builder
            .addTools(tools)
            .addTransitiveInputs(baseInputs)
            .addTransitiveInputs(classpath)
            .addOutputs(outputs)
            .setCommandLine(commandLine.build())
            .setJarExecutable(
                JavaCommon.getHostJavaExecutable(ruleContext),
                javaToolchain.getHeaderCompiler(),
                javaToolchain.getJvmOptions())
            .setMnemonic("Turbine")
            .setProgressMessage(getProgressMessage())
            .build(ruleContext);
      }

      // The transitive command line can be long, so it is written to a params
      // file (ISO_8859_1 to round-trip arbitrary bytes) referenced via "@<path>".
      CommandLine transitiveParams = transitiveCommandLine();
      PathFragment paramFilePath = ParameterFile.derivePath(outputJar.getRootRelativePath());
      Artifact paramsFile =
          ruleContext
              .getAnalysisEnvironment()
              .getDerivedArtifact(paramFilePath, outputJar.getRoot());
      ParameterFileWriteAction parameterFileWriteAction =
          new ParameterFileWriteAction(
              ruleContext.getActionOwner(),
              paramsFile,
              transitiveParams,
              ParameterFile.ParameterFileType.UNQUOTED,
              ISO_8859_1);
      CommandLine transitiveCommandLine =
          getBaseArgs(javaToolchain).addPaths("@%s", paramsFile.getExecPath()).build();
      NestedSet<Artifact> transitiveInputs =
          NestedSetBuilder.<Artifact>stableOrder()
              .addTransitive(baseInputs)
              .addTransitive(classpathEntries)
              .addTransitive(processorPath)
              .addTransitive(compileTimeDependencyArtifacts)
              .add(paramsFile)
              .build();

      if (requiresAnnotationProcessing) {
        // turbine doesn't support API-generating annotation processors, so skip the two-tiered
        // turbine/javac-turbine action and just use SpawnAction to invoke javac-turbine.
        return new Action[] {
          parameterFileWriteAction,
          new SpawnAction(
              ruleContext.getActionOwner(),
              tools,
              transitiveInputs,
              outputs,
              LOCAL_RESOURCES,
              transitiveCommandLine,
              false,
              // TODO(b/63280599): This is missing the config's action environment.
              JavaCompileAction.UTF8_ACTION_ENVIRONMENT,
              getProgressMessageWithAnnotationProcessors(),
              "JavacTurbine")
        };
      }

      // The action doesn't require annotation processing, javac-turbine fallback is enabled, and
      // the target distinguishes between direct and transitive deps. Try a two-tiered spawn
      // that invokes turbine with direct deps, and falls back to javac-turbine on failures to
      // produce better diagnostics. (At the cost of slower failed actions and a larger
      // cache footprint.)
      // TODO(cushon): productionize --nojavac_fallback and remove this path
      checkState(!directJars.isEmpty());
      NestedSet<Artifact> directInputs =
          NestedSetBuilder.fromNestedSet(baseInputs).addTransitive(directJars).build();
      CustomCommandLine directCommandLine =
          baseCommandLine(getBaseArgs(javaToolchain), directJars).build();
      return new Action[] {
        parameterFileWriteAction,
        new JavaHeaderCompileAction(
            ruleContext.getActionOwner(),
            tools,
            directInputs,
            transitiveInputs,
            outputs,
            directCommandLine,
            transitiveCommandLine,
            getProgressMessage())
      };
    }

    // Progress message variant that lists the short names of the annotation
    // processors that will run (javac-turbine path only).
    private String getProgressMessageWithAnnotationProcessors() {
      List<String> shortNames = new ArrayList<>();
      for (String name : processorNames) {
        shortNames.add(name.substring(name.lastIndexOf('.') + 1));
      }
      return getProgressMessage()
          + " and running annotation processors ("
          + Joiner.on(", ").join(shortNames)
          + ")";
    }

    private String getProgressMessage() {
      return String.format(
          "Compiling Java headers %s (%d files)",
          outputJar.prettyPrint(), sourceFiles.size() + sourceJars.size());
    }

    // Common JVM invocation prefix: host java -Xverify:none <jvm opts> -jar <header compiler>.
    private CustomCommandLine.Builder getBaseArgs(JavaToolchainProvider javaToolchain) {
      return CustomCommandLine.builder()
          .addPath(JavaCommon.getHostJavaExecutable(ruleContext))
          .add("-Xverify:none")
          .add(javaToolchain.getJvmOptions())
          .addExecPath("-jar", javaToolchain.getHeaderCompiler());
    }

    /**
     * Adds the command line arguments shared by direct classpath and transitive classpath
     * invocations.
     */
    private CustomCommandLine.Builder baseCommandLine(
        CustomCommandLine.Builder result, NestedSet<Artifact> classpathEntries) {
      result.addExecPath("--output", outputJar);
      if (outputDepsProto != null) {
        result.addExecPath("--output_deps", outputDepsProto);
      }
      result.add("--temp_dir").addPath(tempDirectory);
      result.addExecPaths("--bootclasspath", bootclasspathEntries);
      result.addExecPaths("--sources", sourceFiles);
      if (!sourceJars.isEmpty()) {
        result.addExecPaths("--source_jars", sourceJars);
      }
      result.add("--javacopts", javacOpts);
      if (ruleKind != null) {
        result.add("--rule_kind");
        result.add(ruleKind);
      }
      if (targetLabel != null) {
        result.add("--target_label");
        if (targetLabel.getPackageIdentifier().getRepository().isDefault()
            || targetLabel.getPackageIdentifier().getRepository().isMain()) {
          result.add(targetLabel.toString());
        } else {
          // @-prefixed strings will be assumed to be params filenames and expanded,
          // so add an extra @ to escape it.
          result.add("@" + targetLabel);
        }
      }
      result.addExecPaths("--classpath", classpathEntries);
      return result;
    }

    /** Builds a transitive classpath command line. */
    private CommandLine transitiveCommandLine() {
      CustomCommandLine.Builder result = CustomCommandLine.builder();
      baseCommandLine(result, classpathEntries);
      if (!processorNames.isEmpty()) {
        result.add("--processors", processorNames);
      }
      if (!processorFlags.isEmpty()) {
        result.add("--javacopts", processorFlags);
      }
      if (!processorPath.isEmpty()) {
        result.addExecPaths("--processorpath", processorPath);
      }
      if (strictJavaDeps != BuildConfiguration.StrictDepsMode.OFF) {
        result.add(new JavaCompileAction.JarsToTargetsArgv(classpathEntries, directJars));
        if (!compileTimeDependencyArtifacts.isEmpty()) {
          result.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts);
        }
      }
      return result.build();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.access;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

/**
 * Test the reading and writing of access permissions on {@code _acl_} table.
 */
@Category(LargeTests.class)
public class TestTablePermissions {
  private static final Log LOG = LogFactory.getLog(TestTablePermissions.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static ZooKeeperWatcher ZKW;
  // Abortable handed to the ZK watcher; records that an abort was requested
  // instead of killing the test JVM.
  private final static Abortable ABORTABLE = new Abortable() {
    private final AtomicBoolean abort = new AtomicBoolean(false);

    @Override
    public void abort(String why, Throwable e) {
      LOG.info(why, e);
      abort.set(true);
    }

    @Override
    public boolean isAborted() {
      return abort.get();
    }
  };

  private static byte[] TEST_TABLE = Bytes.toBytes("perms_test");
  private static byte[] TEST_TABLE2 = Bytes.toBytes("perms_test2");
  private static byte[] TEST_FAMILY = Bytes.toBytes("f1");
  private static byte[] TEST_QUALIFIER = Bytes.toBytes("col1");

  /**
   * Starts a secured mini cluster, waits for the ACL table to come online and
   * creates the two tables the permission tests operate on.
   */
  @BeforeClass
  public static void beforeClass() throws Exception {
    // setup configuration
    Configuration conf = UTIL.getConfiguration();
    SecureTestUtil.enableSecurity(conf);

    UTIL.startMiniCluster();
    // Wait for the ACL table to become available
    UTIL.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 30000);

    ZKW = new ZooKeeperWatcher(UTIL.getConfiguration(),
      "TestTablePermissions", ABORTABLE);

    UTIL.createTable(TEST_TABLE, TEST_FAMILY);
    UTIL.createTable(TEST_TABLE2, TEST_FAMILY);
  }

  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  /**
   * Clears all permissions written by a test so each test starts from an
   * empty ACL table.
   */
  @After
  public void tearDown() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    AccessControlLists.removeTablePermissions(conf, TEST_TABLE);
    AccessControlLists.removeTablePermissions(conf, TEST_TABLE2);
    AccessControlLists.removeTablePermissions(conf, AccessControlLists.ACL_TABLE_NAME);
  }

  /**
   * Writes table-, family- and qualifier-level permissions and verifies they
   * read back intact, both per-table and via a full {@code loadAll}.
   */
  @Test
  public void testBasicWrite() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    // add some permissions
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("george"), TEST_TABLE, null,
            (byte[])null, UserPermission.Action.READ, UserPermission.Action.WRITE));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("hubert"), TEST_TABLE, null,
            (byte[])null, UserPermission.Action.READ));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("humphrey"),
            TEST_TABLE, TEST_FAMILY, TEST_QUALIFIER,
            UserPermission.Action.READ));

    // retrieve the same
    ListMultimap<String,TablePermission> perms =
        AccessControlLists.getTablePermissions(conf, TEST_TABLE);
    List<TablePermission> userPerms = perms.get("george");
    assertNotNull("Should have permissions for george", userPerms);
    assertEquals("Should have 1 permission for george", 1, userPerms.size());
    TablePermission permission = userPerms.get(0);
    assertTrue("Permission should be for " + TEST_TABLE,
        Bytes.equals(TEST_TABLE, permission.getTable()));
    assertNull("Column family should be empty", permission.getFamily());

    // check actions
    assertNotNull(permission.getActions());
    assertEquals(2, permission.getActions().length);
    List<TablePermission.Action> actions = Arrays.asList(permission.getActions());
    assertTrue(actions.contains(TablePermission.Action.READ));
    assertTrue(actions.contains(TablePermission.Action.WRITE));

    userPerms = perms.get("hubert");
    assertNotNull("Should have permissions for hubert", userPerms);
    assertEquals("Should have 1 permission for hubert", 1, userPerms.size());
    permission = userPerms.get(0);
    assertTrue("Permission should be for " + TEST_TABLE,
        Bytes.equals(TEST_TABLE, permission.getTable()));
    assertNull("Column family should be empty", permission.getFamily());

    // check actions
    assertNotNull(permission.getActions());
    assertEquals(1, permission.getActions().length);
    actions = Arrays.asList(permission.getActions());
    assertTrue(actions.contains(TablePermission.Action.READ));
    assertFalse(actions.contains(TablePermission.Action.WRITE));

    userPerms = perms.get("humphrey");
    assertNotNull("Should have permissions for humphrey", userPerms);
    assertEquals("Should have 1 permission for humphrey", 1, userPerms.size());
    permission = userPerms.get(0);
    assertTrue("Permission should be for " + TEST_TABLE,
        Bytes.equals(TEST_TABLE, permission.getTable()));
    assertTrue("Permission should be for family " + TEST_FAMILY,
        Bytes.equals(TEST_FAMILY, permission.getFamily()));
    assertTrue("Permission should be for qualifier " + TEST_QUALIFIER,
        Bytes.equals(TEST_QUALIFIER, permission.getQualifier()));

    // check actions
    assertNotNull(permission.getActions());
    assertEquals(1, permission.getActions().length);
    actions = Arrays.asList(permission.getActions());
    assertTrue(actions.contains(TablePermission.Action.READ));
    assertFalse(actions.contains(TablePermission.Action.WRITE));

    // table 2 permissions
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("hubert"), TEST_TABLE2, null,
            (byte[])null, TablePermission.Action.READ, TablePermission.Action.WRITE));

    // check full load
    Map<byte[],ListMultimap<String,TablePermission>> allPerms =
        AccessControlLists.loadAll(conf);
    assertEquals("Full permission map should have entries for both test tables",
        2, allPerms.size());

    userPerms = allPerms.get(TEST_TABLE).get("hubert");
    assertNotNull(userPerms);
    assertEquals(1, userPerms.size());
    permission = userPerms.get(0);
    assertTrue(Bytes.equals(TEST_TABLE, permission.getTable()));
    assertEquals(1, permission.getActions().length);
    assertEquals(TablePermission.Action.READ, permission.getActions()[0]);

    userPerms = allPerms.get(TEST_TABLE2).get("hubert");
    assertNotNull(userPerms);
    assertEquals(1, userPerms.size());
    permission = userPerms.get(0);
    assertTrue(Bytes.equals(TEST_TABLE2, permission.getTable()));
    assertEquals(2, permission.getActions().length);
    actions = Arrays.asList(permission.getActions());
    assertTrue(actions.contains(TablePermission.Action.READ));
    assertTrue(actions.contains(TablePermission.Action.WRITE));
  }

  /**
   * Verifies that stored permissions survive table metadata changes (here, a
   * region split).
   */
  @Test
  public void testPersistence() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("albert"), TEST_TABLE, null,
            (byte[])null, TablePermission.Action.READ));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("betty"), TEST_TABLE, null,
            (byte[])null, TablePermission.Action.READ,
            TablePermission.Action.WRITE));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("clark"), TEST_TABLE,
            TEST_FAMILY, TablePermission.Action.READ));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("dwight"), TEST_TABLE,
            TEST_FAMILY, TEST_QUALIFIER, TablePermission.Action.WRITE));

    // verify permissions survive changes in table metadata
    ListMultimap<String,TablePermission> preperms =
        AccessControlLists.getTablePermissions(conf, TEST_TABLE);

    HTable table = new HTable(conf, TEST_TABLE);
    table.put(new Put(Bytes.toBytes("row1"))
        .add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v1")));
    table.put(new Put(Bytes.toBytes("row2"))
        .add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v2")));
    HBaseAdmin admin = UTIL.getHBaseAdmin();
    admin.split(TEST_TABLE);

    // wait for split
    Thread.sleep(10000);

    ListMultimap<String,TablePermission> postperms =
        AccessControlLists.getTablePermissions(conf, TEST_TABLE);

    checkMultimapEqual(preperms, postperms);
  }

  /**
   * Round-trips a permission multimap through the serialized wire format and
   * checks the copy is equal.
   */
  @Test
  public void testSerialization() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    ListMultimap<String,TablePermission> permissions = ArrayListMultimap.create();
    permissions.put("george", new TablePermission(TEST_TABLE, null,
        TablePermission.Action.READ));
    permissions.put("george", new TablePermission(TEST_TABLE, TEST_FAMILY,
        TablePermission.Action.WRITE));
    permissions.put("george", new TablePermission(TEST_TABLE2, null,
        TablePermission.Action.READ));
    permissions.put("hubert", new TablePermission(TEST_TABLE2, null,
        TablePermission.Action.READ, TablePermission.Action.WRITE));

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    AccessControlLists.writePermissions(new DataOutputStream(bos), permissions, conf);

    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    ListMultimap<String,TablePermission> copy =
        AccessControlLists.readPermissions(new DataInputStream(bis), conf);

    checkMultimapEqual(permissions, copy);
  }

  /**
   * Asserts the two multimaps hold the same permissions per key (order
   * insensitive within a key).
   */
  public void checkMultimapEqual(ListMultimap<String,TablePermission> first,
      ListMultimap<String,TablePermission> second) {
    assertEquals(first.size(), second.size());
    for (String key : first.keySet()) {
      List<TablePermission> firstPerms = first.get(key);
      List<TablePermission> secondPerms = second.get(key);
      assertNotNull(secondPerms);
      assertEquals(firstPerms.size(), secondPerms.size());
      LOG.info("First permissions: "+firstPerms.toString());
      LOG.info("Second permissions: "+secondPerms.toString());
      for (TablePermission p : firstPerms) {
        assertTrue("Permission "+p.toString()+" not found", secondPerms.contains(p));
      }
    }
  }

  /**
   * Exercises {@link TablePermission#equals(Object)}: equal regardless of
   * action order, unequal on any differing table/family/qualifier/actions.
   */
  @Test
  public void testEquals() throws Exception {
    TablePermission p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
    TablePermission p2 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
    assertTrue(p1.equals(p2));
    assertTrue(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ, TablePermission.Action.WRITE);
    p2 = new TablePermission(TEST_TABLE, null, TablePermission.Action.WRITE, TablePermission.Action.READ);
    assertTrue(p1.equals(p2));
    assertTrue(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, TEST_FAMILY, TablePermission.Action.READ, TablePermission.Action.WRITE);
    p2 = new TablePermission(TEST_TABLE, TEST_FAMILY, TablePermission.Action.WRITE, TablePermission.Action.READ);
    assertTrue(p1.equals(p2));
    assertTrue(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, TEST_FAMILY, TEST_QUALIFIER, TablePermission.Action.READ, TablePermission.Action.WRITE);
    p2 = new TablePermission(TEST_TABLE, TEST_FAMILY, TEST_QUALIFIER, TablePermission.Action.WRITE, TablePermission.Action.READ);
    assertTrue(p1.equals(p2));
    assertTrue(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
    p2 = new TablePermission(TEST_TABLE, TEST_FAMILY, TablePermission.Action.READ);
    assertFalse(p1.equals(p2));
    assertFalse(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
    p2 = new TablePermission(TEST_TABLE, null, TablePermission.Action.WRITE);
    assertFalse(p1.equals(p2));
    assertFalse(p2.equals(p1));
    p2 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ, TablePermission.Action.WRITE);
    assertFalse(p1.equals(p2));
    assertFalse(p2.equals(p1));

    p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
    p2 = new TablePermission(TEST_TABLE2, null, TablePermission.Action.READ);
    assertFalse(p1.equals(p2));
    assertFalse(p2.equals(p1));

    p2 = new TablePermission(TEST_TABLE, null);
    assertFalse(p1.equals(p2));
    assertFalse(p2.equals(p1));
  }

  /**
   * Writes global (table-less) permissions and verifies the action arrays
   * read back exactly as stored.
   */
  @Test
  public void testGlobalPermission() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    // add some permissions
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("user1"),
            Permission.Action.READ, Permission.Action.WRITE));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("user2"),
            Permission.Action.CREATE));
    AccessControlLists.addUserPermission(conf,
        new UserPermission(Bytes.toBytes("user3"),
            Permission.Action.ADMIN, Permission.Action.READ, Permission.Action.CREATE));

    ListMultimap<String,TablePermission> perms =
        AccessControlLists.getTablePermissions(conf, null);
    List<TablePermission> user1Perms = perms.get("user1");
    assertEquals("Should have 1 permission for user1", 1, user1Perms.size());
    // use assertArrayEquals for element-wise array comparison;
    // assertEquals(Object[], Object[]) is deprecated in JUnit 4
    assertArrayEquals("user1 should have WRITE permission",
        new Permission.Action[] { Permission.Action.READ, Permission.Action.WRITE },
        user1Perms.get(0).getActions());

    List<TablePermission> user2Perms = perms.get("user2");
    assertEquals("Should have 1 permission for user2", 1, user2Perms.size());
    assertArrayEquals("user2 should have CREATE permission",
        new Permission.Action[] { Permission.Action.CREATE },
        user2Perms.get(0).getActions());

    List<TablePermission> user3Perms = perms.get("user3");
    assertEquals("Should have 1 permission for user3", 1, user3Perms.size());
    assertArrayEquals("user3 should have ADMIN, READ, CREATE permission",
        new Permission.Action[] {
            Permission.Action.ADMIN, Permission.Action.READ, Permission.Action.CREATE
        },
        user3Perms.get(0).getActions());
  }

  /**
   * Regression test for a race in {@code TableAuthManager}: global permission
   * checks must keep succeeding while the global cache is being updated.
   */
  @Test
  public void testAuthManager() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    /* test a race condition causing TableAuthManager to sometimes fail global permissions checks
     * when the global cache is being updated
     */
    TableAuthManager authManager = TableAuthManager.get(ZKW, conf);
    // currently running user is the system user and should have global admin perms
    User currentUser = User.getCurrent();
    assertTrue(authManager.authorize(currentUser, Permission.Action.ADMIN));
    for (int i=1; i<=50; i++) {
      AccessControlLists.addUserPermission(conf, new UserPermission(Bytes.toBytes("testauth"+i),
          Permission.Action.ADMIN, Permission.Action.READ, Permission.Action.WRITE));
      // make sure the system user still shows as authorized
      assertTrue("Failed current user auth check on iter "+i,
          authManager.authorize(currentUser, Permission.Action.ADMIN));
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.geronimo.shell.diagnose;

import static org.eclipse.osgi.service.resolver.ResolverError.IMPORT_PACKAGE_USES_CONFLICT;
import static org.eclipse.osgi.service.resolver.ResolverError.MISSING_FRAGMENT_HOST;
import static org.eclipse.osgi.service.resolver.ResolverError.MISSING_IMPORT_PACKAGE;
import static org.eclipse.osgi.service.resolver.ResolverError.MISSING_REQUIRE_BUNDLE;
import static org.eclipse.osgi.service.resolver.ResolverError.REQUIRE_BUNDLE_USES_CONFLICT;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
import org.apache.karaf.shell.commands.Option;
import org.apache.karaf.shell.console.OsgiCommandSupport;
import org.eclipse.osgi.service.resolver.BundleDescription;
import org.eclipse.osgi.service.resolver.BundleSpecification;
import org.eclipse.osgi.service.resolver.ExportPackageDescription;
import org.eclipse.osgi.service.resolver.HostSpecification;
import org.eclipse.osgi.service.resolver.ImportPackageSpecification;
import org.eclipse.osgi.service.resolver.PlatformAdmin;
import org.eclipse.osgi.service.resolver.ResolverError;
import org.eclipse.osgi.service.resolver.State;
import org.eclipse.osgi.service.resolver.StateHelper;
import org.eclipse.osgi.service.resolver.VersionConstraint;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;

/**
 * Karaf shell command that diagnoses common OSGi resolver problems for one
 * or more bundles, using the Equinox {@link PlatformAdmin} resolver state.
 * Only works when running on Equinox.
 */
@Command(scope = "equinox", name = "diagnose", description = "Diagnose common OSGi resolver problems")
public class DiagnoseCommand extends OsgiCommandSupport {

    @Argument(index = 0, name = "ids", description = "The list of bundle IDs separated by whitespaces", required = true, multiValued = true)
    List<Long> ids;

    @Option(name = "-s", aliases = { "--simple" }, description = "Do not perform deeper analysis of resolver problems")
    boolean simple = false;

    /**
     * Returns true when the Equinox resolver API is visible to this bundle,
     * i.e. the command is running on an Equinox framework.
     */
    private boolean hasPlatformAdmin() {
        try {
            bundleContext.getBundle().loadClass("org.eclipse.osgi.service.resolver.PlatformAdmin");
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    /**
     * Entry point: diagnoses each requested bundle ID in turn, printing a
     * blank line between bundles. Releases the PlatformAdmin service when done.
     */
    protected Object doExecute() throws Exception {
        if (!hasPlatformAdmin()) {
            System.err.println("This command is only supported on Equinox.");
            return null;
        }
        ServiceReference ref = bundleContext.getServiceReference(PlatformAdmin.class.getName());
        PlatformAdmin platformAdmin = (PlatformAdmin) getService(PlatformAdmin.class, ref);
        try {
            State systemState = platformAdmin.getState(false);
            Iterator<Long> iterator = ids.iterator();
            while (iterator.hasNext()) {
                Long id = iterator.next();
                BundleDescription bundle = systemState.getBundle(id);
                if (bundle == null) {
                    // note the space after "ID" so the message reads correctly
                    System.err.println("Bundle ID " + id + " is invalid");
                    continue;
                }
                diagnose(bundle, platformAdmin);
                if (iterator.hasNext()) {
                    System.out.println();
                }
            }
        } finally {
            bundleContext.ungetService(ref);
        }
        return null;
    }

    /**
     * Prints the bundle's unsatisfied constraints and resolver errors, or a
     * "no unresolved constraints" message when there are none.
     */
    private void diagnose(BundleDescription bundle, PlatformAdmin platformAdmin) {
        System.out.println(Utils.bundleToString(bundle));
        StateHelper stateHelper = platformAdmin.getStateHelper();

        VersionConstraint[] unsatisfied = stateHelper.getUnsatisfiedConstraints(bundle);
        ResolverError[] resolverErrors = analyzeErrors(bundle, platformAdmin.getState(false));
        if (unsatisfied.length == 0 && resolverErrors.length == 0) {
            System.out.println(Utils.formatMessage(2, "No unresolved constraints."));
        }
        if (unsatisfied.length > 0) {
            System.out.println(Utils.formatMessage(2, "Unresolved constraints:"));
            for (int i = 0; i < unsatisfied.length; i++) {
                System.out.println(Utils.formatMessage(3, getResolutionFailureMessage(unsatisfied[i])));
            }
        }
    }

    /**
     * Maps an unresolved constraint to a human-readable message; optional
     * constraints are reported as warnings, mandatory ones as errors.
     *
     * @throws IllegalArgumentException if the constraint is actually resolved
     */
    public static String getResolutionFailureMessage(VersionConstraint unsatisfied) {
        if (unsatisfied.isResolved())
            throw new IllegalArgumentException();
        if (unsatisfied instanceof ImportPackageSpecification) {
            String resolution = (String) ((ImportPackageSpecification) unsatisfied).getDirective(Constants.RESOLUTION_DIRECTIVE);
            if (ImportPackageSpecification.RESOLUTION_OPTIONAL.equals(resolution)) {
                return Utils.warning("Missing optionally imported package " + Utils.versionToString(unsatisfied));
            } else if (ImportPackageSpecification.RESOLUTION_DYNAMIC.equals(resolution)) {
                return Utils.warning("Missing dynamically imported package " + Utils.versionToString(unsatisfied));
            } else {
                return Utils.error("Missing imported package " + Utils.versionToString(unsatisfied));
            }
        } else if (unsatisfied instanceof BundleSpecification) {
            if (((BundleSpecification) unsatisfied).isOptional()) {
                return Utils.warning("Missing optionally required bundle " + Utils.versionToString(unsatisfied));
            } else {
                return Utils.error("Missing required bundle " + Utils.versionToString(unsatisfied));
            }
        } else if (unsatisfied instanceof HostSpecification) {
            return Utils.error("Missing host bundle " + Utils.versionToString(unsatisfied));
        } else {
            return Utils.error("Unknown problem");
        }
    }

    /**
     * Analyzes and prints the resolver errors for the given bundle,
     * recursively drilling into the bundles that caused them.
     */
    public ResolverError[] analyzeErrors(BundleDescription bundle, State state) {
        return analyzeErrors(bundle, state, new HashSet<BundleDescription>(), 2);
    }

    /**
     * Recursive worker for {@link #analyzeErrors(BundleDescription, State)}.
     * {@code bundles} tracks already-visited bundles to break cycles;
     * {@code level} is the indentation depth for output.
     */
    private ResolverError[] analyzeErrors(BundleDescription bundle, State state, Set<BundleDescription> bundles, int level) {
        if (bundles.contains(bundle)) {
            // already analyzed this bundle; return an empty array rather than
            // null so callers can safely check .length
            return new ResolverError[0];
        }
        bundles.add(bundle);
        ResolverError[] errors = state.getResolverErrors(bundle);
        if (level == 2 && errors.length > 0) {
            System.out.println(Utils.formatMessage(level, "Resolver errors:"));
        }
        PackageUsesHelper helper = null;
        for (ResolverError error : errors) {
            Utils.displayError(bundle, level, error.toString());
            VersionConstraint constraint = error.getUnsatisfiedConstraint();
            switch (error.getType()) {
            case MISSING_IMPORT_PACKAGE:
                // find exporters of the missing package and explain why each
                // one did not satisfy the import
                ImportPackageSpecification pkgSpec = (ImportPackageSpecification) constraint;
                for (BundleDescription b : state.getBundles()) {
                    for (ExportPackageDescription pkg : b.getExportPackages()) {
                        if (pkg.getName().equals(pkgSpec.getName())) {
                            if (pkgSpec.getVersionRange().isIncluded(pkg.getVersion())) {
                                if (!pkg.getExporter().isResolved()) {
                                    Utils.displayError(b, level + 1, "Bundle unresolved: " + pkg);
                                    analyzeErrors(pkg.getExporter(), state, bundles, level + 1);
                                }
                            } else {
                                Utils.displayError(b, level + 1, "Version mismatch: " + pkgSpec + " " + pkg);
                            }
                        }
                    }
                }
                break;
            case MISSING_REQUIRE_BUNDLE:
            case MISSING_FRAGMENT_HOST:
                // find candidate bundles by symbolic name and explain the
                // mismatch, or recurse when the candidate itself is broken
                for (BundleDescription b : state.getBundles()) {
                    if (b == bundle) {
                        continue;
                    }
                    if (b.getSymbolicName() == null) {
                        Utils.displayError(b, level, "No SymbolicName for " + b.getLocation());
                        continue;
                    }
                    if (constraint.getName() == null) {
                        Utils.displayError(bundle, level, "No constraint name: " + constraint);
                    }
                    if (b.getSymbolicName().equals(constraint.getName())) {
                        if (constraint.getVersionRange().isIncluded(b.getVersion())) {
                            // There must be something wrong in the bundle
                            analyzeErrors(b, state, bundles, level + 1);
                        } else {
                            Utils.displayError(bundle, level, "Version mismatch: " + constraint + " " + b);
                        }
                    }
                }
                break;
            case IMPORT_PACKAGE_USES_CONFLICT:
                if (!simple) {
                    // multiple conflicts can be reported on the same bundle
                    // so ensure helper only runs once.
                    if (helper == null) {
                        helper = new PackageUsesHelper(state);
                        helper.analyzeConflict(bundle, level);
                    }
                }
                break;
            case REQUIRE_BUNDLE_USES_CONFLICT:
            default:
                // error is already logged
                break;
            }
        }
        return errors;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.rewrite; import com.facebook.presto.Session; import com.facebook.presto.connector.ConnectorId; import com.facebook.presto.metadata.FunctionKind; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.SessionPropertyManager.SessionPropertyValue; import com.facebook.presto.metadata.SqlFunction; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.metadata.TableLayoutResult; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.security.AccessControl; import com.facebook.presto.spi.CatalogSchemaName; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.Constraint; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.session.PropertyMetadata; import com.facebook.presto.sql.analyzer.QueryExplainer; import com.facebook.presto.sql.analyzer.SemanticException; import com.facebook.presto.sql.parser.ParsingException; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.tree.AllColumns; import com.facebook.presto.sql.tree.ArrayConstructor; import com.facebook.presto.sql.tree.AstVisitor; import 
com.facebook.presto.sql.tree.BooleanLiteral; import com.facebook.presto.sql.tree.Cast; import com.facebook.presto.sql.tree.ColumnDefinition; import com.facebook.presto.sql.tree.CreateTable; import com.facebook.presto.sql.tree.CreateView; import com.facebook.presto.sql.tree.DoubleLiteral; import com.facebook.presto.sql.tree.Explain; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.GroupBy; import com.facebook.presto.sql.tree.Identifier; import com.facebook.presto.sql.tree.LikePredicate; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.Node; import com.facebook.presto.sql.tree.OrderBy; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.SelectItem; import com.facebook.presto.sql.tree.ShowCatalogs; import com.facebook.presto.sql.tree.ShowColumns; import com.facebook.presto.sql.tree.ShowCreate; import com.facebook.presto.sql.tree.ShowFunctions; import com.facebook.presto.sql.tree.ShowGrants; import com.facebook.presto.sql.tree.ShowPartitions; import com.facebook.presto.sql.tree.ShowSchemas; import com.facebook.presto.sql.tree.ShowSession; import com.facebook.presto.sql.tree.ShowTables; import com.facebook.presto.sql.tree.SimpleGroupBy; import com.facebook.presto.sql.tree.SingleColumn; import com.facebook.presto.sql.tree.SortItem; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.StringLiteral; import com.facebook.presto.sql.tree.TableElement; import com.facebook.presto.sql.tree.Values; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import static 
com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_COLUMNS; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_INTERNAL_PARTITIONS; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_SCHEMATA; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_TABLES; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_TABLE_PRIVILEGES; import static com.facebook.presto.metadata.MetadataListing.listCatalogs; import static com.facebook.presto.metadata.MetadataListing.listSchemas; import static com.facebook.presto.metadata.MetadataUtil.createCatalogSchemaName; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedName; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedObjectName; import static com.facebook.presto.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; import static com.facebook.presto.sql.QueryUtil.aliased; import static com.facebook.presto.sql.QueryUtil.aliasedName; import static com.facebook.presto.sql.QueryUtil.aliasedNullToEmpty; import static com.facebook.presto.sql.QueryUtil.ascending; import static com.facebook.presto.sql.QueryUtil.caseWhen; import static com.facebook.presto.sql.QueryUtil.equal; import static com.facebook.presto.sql.QueryUtil.functionCall; import static com.facebook.presto.sql.QueryUtil.identifier; import static com.facebook.presto.sql.QueryUtil.logicalAnd; import static com.facebook.presto.sql.QueryUtil.ordering; import static com.facebook.presto.sql.QueryUtil.row; import static com.facebook.presto.sql.QueryUtil.selectAll; import static com.facebook.presto.sql.QueryUtil.selectList; import static com.facebook.presto.sql.QueryUtil.simpleQuery; import static com.facebook.presto.sql.QueryUtil.singleValueQuery; import static com.facebook.presto.sql.QueryUtil.subquery; import static com.facebook.presto.sql.QueryUtil.table; 
import static com.facebook.presto.sql.QueryUtil.unaliasedName; import static com.facebook.presto.sql.SqlFormatter.formatSql; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.CATALOG_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_PARSE_ERROR; import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL; import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL; import static com.facebook.presto.sql.tree.ShowCreate.Type.TABLE; import static com.facebook.presto.sql.tree.ShowCreate.Type.VIEW; import static com.google.common.base.Strings.nullToEmpty; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.Iterables.getOnlyElement; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; final class ShowQueriesRewrite implements StatementRewrite.Rewrite { @Override public Statement rewrite( Session session, Metadata metadata, SqlParser parser, Optional<QueryExplainer> queryExplainer, Statement node, List<Expression> parameters, AccessControl accessControl) { return (Statement) new Visitor(metadata, parser, session, parameters, accessControl, queryExplainer).process(node, null); } private static class Visitor extends AstVisitor<Node, Void> { private final Metadata metadata; private final Session session; private final SqlParser sqlParser; List<Expression> parameters; private final AccessControl accessControl; private Optional<QueryExplainer> queryExplainer; public Visitor(Metadata metadata, SqlParser sqlParser, Session session, List<Expression> parameters, AccessControl accessControl, Optional<QueryExplainer> queryExplainer) { 
this.metadata = requireNonNull(metadata, "metadata is null"); this.sqlParser = requireNonNull(sqlParser, "sqlParser is null"); this.session = requireNonNull(session, "session is null"); this.parameters = requireNonNull(parameters, "parameters is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.queryExplainer = requireNonNull(queryExplainer, "queryExplainer is null"); } @Override protected Node visitExplain(Explain node, Void context) { Statement statement = (Statement) process(node.getStatement(), null); return new Explain( node.getLocation().get(), node.isAnalyze(), node.isVerbose(), statement, node.getOptions()); } @Override protected Node visitShowTables(ShowTables showTables, Void context) { CatalogSchemaName schema = createCatalogSchemaName(session, showTables, showTables.getSchema()); accessControl.checkCanShowTablesMetadata(session.getRequiredTransactionId(), session.getIdentity(), schema); if (!metadata.schemaExists(session, schema)) { throw new SemanticException(MISSING_SCHEMA, showTables, "Schema '%s' does not exist", schema.getSchemaName()); } Expression predicate = equal(identifier("table_schema"), new StringLiteral(schema.getSchemaName())); Optional<String> likePattern = showTables.getLikePattern(); if (likePattern.isPresent()) { Expression likePredicate = new LikePredicate(identifier("table_name"), new StringLiteral(likePattern.get()), null); predicate = logicalAnd(predicate, likePredicate); } return simpleQuery( selectList(aliasedName("table_name", "Table")), from(schema.getCatalogName(), TABLE_TABLES), predicate, ordering(ascending("table_name"))); } @Override protected Node visitShowGrants(ShowGrants showGrants, Void context) { String catalogName = session.getCatalog().orElse(null); Optional<Expression> predicate = Optional.empty(); Optional<QualifiedName> tableName = showGrants.getTableName(); if (tableName.isPresent()) { QualifiedObjectName qualifiedTableName = createQualifiedObjectName(session, 
showGrants, tableName.get()); if (!metadata.getView(session, qualifiedTableName).isPresent() && !metadata.getTableHandle(session, qualifiedTableName).isPresent()) { throw new SemanticException(MISSING_TABLE, showGrants, "Table '%s' does not exist", tableName); } catalogName = qualifiedTableName.getCatalogName(); accessControl.checkCanShowTablesMetadata( session.getRequiredTransactionId(), session.getIdentity(), new CatalogSchemaName(catalogName, qualifiedTableName.getSchemaName())); predicate = Optional.of(equal(identifier("table_name"), new StringLiteral(qualifiedTableName.getObjectName()))); } if (catalogName == null) { throw new SemanticException(CATALOG_NOT_SPECIFIED, showGrants, "Catalog must be specified when session catalog is not set"); } Set<String> allowedSchemas = listSchemas(session, metadata, accessControl, catalogName); for (String schema : allowedSchemas) { accessControl.checkCanShowTablesMetadata(session.getRequiredTransactionId(), session.getIdentity(), new CatalogSchemaName(catalogName, schema)); } return simpleQuery( selectList( aliasedName("grantee", "Grantee"), aliasedName("table_catalog", "Catalog"), aliasedName("table_schema", "Schema"), aliasedName("table_name", "Table"), aliasedName("privilege_type", "Privilege"), aliasedName("is_grantable", "Grantable")), from(catalogName, TABLE_TABLE_PRIVILEGES), predicate, Optional.of(ordering(ascending("grantee"), ascending("table_name")))); } @Override protected Node visitShowSchemas(ShowSchemas node, Void context) { if (!node.getCatalog().isPresent() && !session.getCatalog().isPresent()) { throw new SemanticException(CATALOG_NOT_SPECIFIED, node, "Catalog must be specified when session catalog is not set"); } String catalog = node.getCatalog().map(Identifier::getValue).orElseGet(() -> session.getCatalog().get()); accessControl.checkCanShowSchemas(session.getRequiredTransactionId(), session.getIdentity(), catalog); Optional<Expression> predicate = Optional.empty(); Optional<String> likePattern = 
node.getLikePattern(); if (likePattern.isPresent()) { predicate = Optional.of(new LikePredicate(identifier("schema_name"), new StringLiteral(likePattern.get()), null)); } return simpleQuery( selectList(aliasedName("schema_name", "Schema")), from(catalog, TABLE_SCHEMATA), predicate, Optional.of(ordering(ascending("schema_name")))); } @Override protected Node visitShowCatalogs(ShowCatalogs node, Void context) { List<Expression> rows = listCatalogs(session, metadata, accessControl).keySet().stream() .map(name -> row(new StringLiteral(name))) .collect(toList()); Optional<Expression> predicate = Optional.empty(); Optional<String> likePattern = node.getLikePattern(); if (likePattern.isPresent()) { predicate = Optional.of(new LikePredicate(identifier("Catalog"), new StringLiteral(likePattern.get()), null)); } return simpleQuery( selectList(new AllColumns()), aliased(new Values(rows), "catalogs", ImmutableList.of("Catalog")), predicate, Optional.of(ordering(ascending("Catalog")))); } @Override protected Node visitShowColumns(ShowColumns showColumns, Void context) { QualifiedObjectName tableName = createQualifiedObjectName(session, showColumns, showColumns.getTable()); if (!metadata.getView(session, tableName).isPresent() && !metadata.getTableHandle(session, tableName).isPresent()) { throw new SemanticException(MISSING_TABLE, showColumns, "Table '%s' does not exist", tableName); } return simpleQuery( selectList( aliasedName("column_name", "Column"), aliasedName("data_type", "Type"), aliasedNullToEmpty("extra_info", "Extra"), aliasedNullToEmpty("comment", "Comment")), from(tableName.getCatalogName(), TABLE_COLUMNS), logicalAnd( equal(identifier("table_schema"), new StringLiteral(tableName.getSchemaName())), equal(identifier("table_name"), new StringLiteral(tableName.getObjectName()))), ordering(ascending("ordinal_position"))); } private static <T> Expression getExpression(PropertyMetadata<T> property, Object value) throws PrestoException { return 
toExpression(property.encode(property.getJavaType().cast(value))); } private static Expression toExpression(Object value) throws PrestoException { if (value instanceof String) { return new StringLiteral(value.toString()); } if (value instanceof Boolean) { return new BooleanLiteral(value.toString()); } if (value instanceof Long || value instanceof Integer) { return new LongLiteral(value.toString()); } if (value instanceof Double) { return new DoubleLiteral(value.toString()); } if (value instanceof List) { List<?> list = (List<?>) value; return new ArrayConstructor(list.stream() .map(Visitor::toExpression) .collect(toList())); } throw new PrestoException(INVALID_TABLE_PROPERTY, format("Failed to convert object of type %s to expression: %s", value.getClass().getName(), value)); } @Override protected Node visitShowPartitions(ShowPartitions showPartitions, Void context) { QualifiedObjectName table = createQualifiedObjectName(session, showPartitions, showPartitions.getTable()); Optional<TableHandle> tableHandle = metadata.getTableHandle(session, table); if (!tableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, showPartitions, "Table '%s' does not exist", table); } List<TableLayoutResult> layouts = metadata.getLayouts(session, tableHandle.get(), Constraint.alwaysTrue(), Optional.empty()); if (layouts.size() != 1) { throw new SemanticException(NOT_SUPPORTED, showPartitions, "Table does not have exactly one layout: %s", table); } TableLayout layout = getOnlyElement(layouts).getLayout(); if (!layout.getDiscretePredicates().isPresent()) { throw new SemanticException(NOT_SUPPORTED, showPartitions, "Table does not have partition columns: %s", table); } List<ColumnHandle> partitionColumns = layout.getDiscretePredicates().get().getColumns(); /* Generate a dynamic pivot to output one column per partition key. 
For example, a table with two partition keys (ds, cluster_name) would generate the following query: SELECT partition_number , max(CASE WHEN partition_key = 'ds' THEN partition_value END) ds , max(CASE WHEN partition_key = 'cluster_name' THEN partition_value END) cluster_name FROM ... GROUP BY partition_number The values are also cast to the type of the partition column. The query is then wrapped to allow custom filtering and ordering. */ ImmutableList.Builder<SelectItem> selectList = ImmutableList.builder(); ImmutableList.Builder<SelectItem> wrappedList = ImmutableList.builder(); selectList.add(unaliasedName("partition_number")); for (ColumnHandle columnHandle : partitionColumns) { ColumnMetadata column = metadata.getColumnMetadata(session, tableHandle.get(), columnHandle); Expression key = equal(identifier("partition_key"), new StringLiteral(column.getName())); Expression value = caseWhen(key, identifier("partition_value")); value = new Cast(value, column.getType().getTypeSignature().toString()); Expression function = functionCall("max", value); selectList.add(new SingleColumn(function, new Identifier(column.getName()))); wrappedList.add(unaliasedName(column.getName())); } Query query = simpleQuery( selectAll(selectList.build()), from(table.getCatalogName(), TABLE_INTERNAL_PARTITIONS), Optional.of(logicalAnd( equal(identifier("table_schema"), new StringLiteral(table.getSchemaName())), equal(identifier("table_name"), new StringLiteral(table.getObjectName())))), Optional.of(new GroupBy(false, ImmutableList.of(new SimpleGroupBy(ImmutableList.of(identifier("partition_number")))))), Optional.empty(), Optional.empty(), Optional.empty()); return simpleQuery( selectAll(wrappedList.build()), subquery(query), showPartitions.getWhere(), Optional.empty(), Optional.empty(), Optional.of(new OrderBy(ImmutableList.<SortItem>builder() .addAll(showPartitions.getOrderBy()) .add(ascending("partition_number")) .build())), showPartitions.getLimit()); } @Override protected Node 
visitShowCreate(ShowCreate node, Void context) { QualifiedObjectName objectName = createQualifiedObjectName(session, node, node.getName()); Optional<ViewDefinition> viewDefinition = metadata.getView(session, objectName); if (node.getType() == VIEW) { if (!viewDefinition.isPresent()) { if (metadata.getTableHandle(session, objectName).isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Relation '%s' is a table, not a view", objectName); } throw new SemanticException(MISSING_TABLE, node, "View '%s' does not exist", objectName); } Query query = parseView(viewDefinition.get().getOriginalSql(), objectName, node); String sql = formatSql(new CreateView(createQualifiedName(objectName), query, false), Optional.of(parameters)).trim(); return singleValueQuery("Create View", sql); } if (node.getType() == TABLE) { if (viewDefinition.isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Relation '%s' is a view, not a table", objectName); } Optional<TableHandle> tableHandle = metadata.getTableHandle(session, objectName); if (!tableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, node, "Table '%s' does not exist", objectName); } ConnectorTableMetadata connectorTableMetadata = metadata.getTableMetadata(session, tableHandle.get()).getMetadata(); List<TableElement> columns = connectorTableMetadata.getColumns().stream() .filter(column -> !column.isHidden()) .map(column -> new ColumnDefinition(new Identifier(column.getName()), column.getType().getDisplayName(), Optional.ofNullable(column.getComment()))) .collect(toImmutableList()); Map<String, Object> properties = connectorTableMetadata.getProperties(); Map<String, PropertyMetadata<?>> allTableProperties = metadata.getTablePropertyManager().getAllProperties().get(tableHandle.get().getConnectorId()); Map<String, Expression> sqlProperties = new HashMap<>(); for (Map.Entry<String, Object> propertyEntry : properties.entrySet()) { String propertyName = propertyEntry.getKey(); Object value = 
propertyEntry.getValue(); if (value == null) { throw new PrestoException(INVALID_TABLE_PROPERTY, format("Property %s for table %s cannot have a null value", propertyName, objectName)); } PropertyMetadata<?> property = allTableProperties.get(propertyName); if (!property.getJavaType().isInstance(value)) { throw new PrestoException(INVALID_TABLE_PROPERTY, format( "Property %s for table %s should have value of type %s, not %s", propertyName, objectName, property.getJavaType().getName(), value.getClass().getName())); } Expression sqlExpression = getExpression(property, value); sqlProperties.put(propertyName, sqlExpression); } CreateTable createTable = new CreateTable( QualifiedName.of(objectName.getCatalogName(), objectName.getSchemaName(), objectName.getObjectName()), columns, false, sqlProperties, connectorTableMetadata.getComment()); return singleValueQuery("Create Table", formatSql(createTable, Optional.of(parameters)).trim()); } throw new UnsupportedOperationException("SHOW CREATE only supported for tables and views"); } @Override protected Node visitShowFunctions(ShowFunctions node, Void context) { ImmutableList.Builder<Expression> rows = ImmutableList.builder(); for (SqlFunction function : metadata.listFunctions()) { rows.add(row( new StringLiteral(function.getSignature().getName()), new StringLiteral(function.getSignature().getReturnType().toString()), new StringLiteral(Joiner.on(", ").join(function.getSignature().getArgumentTypes())), new StringLiteral(getFunctionType(function)), function.isDeterministic() ? 
TRUE_LITERAL : FALSE_LITERAL, new StringLiteral(nullToEmpty(function.getDescription())))); } Map<String, String> columns = ImmutableMap.<String, String>builder() .put("function_name", "Function") .put("return_type", "Return Type") .put("argument_types", "Argument Types") .put("function_type", "Function Type") .put("deterministic", "Deterministic") .put("description", "Description") .build(); return simpleQuery( selectAll(columns.entrySet().stream() .map(entry -> aliasedName(entry.getKey(), entry.getValue())) .collect(toImmutableList())), aliased(new Values(rows.build()), "functions", ImmutableList.copyOf(columns.keySet())), ordering( ascending("function_name"), ascending("return_type"), ascending("argument_types"), ascending("function_type"))); } private static String getFunctionType(SqlFunction function) { FunctionKind kind = function.getSignature().getKind(); switch (kind) { case AGGREGATE: return "aggregate"; case WINDOW: return "window"; case SCALAR: return "scalar"; } throw new IllegalArgumentException("Unsupported function kind: " + kind); } @Override protected Node visitShowSession(ShowSession node, Void context) { ImmutableList.Builder<Expression> rows = ImmutableList.builder(); SortedMap<String, ConnectorId> catalogNames = listCatalogs(session, metadata, accessControl); List<SessionPropertyValue> sessionProperties = metadata.getSessionPropertyManager().getAllSessionProperties(session, catalogNames); for (SessionPropertyValue sessionProperty : sessionProperties) { if (sessionProperty.isHidden()) { continue; } String value = sessionProperty.getValue(); String defaultValue = sessionProperty.getDefaultValue(); rows.add(row( new StringLiteral(sessionProperty.getFullyQualifiedName()), new StringLiteral(nullToEmpty(value)), new StringLiteral(nullToEmpty(defaultValue)), new StringLiteral(sessionProperty.getType()), new StringLiteral(sessionProperty.getDescription()), TRUE_LITERAL)); } // add bogus row so we can support empty sessions StringLiteral empty = new 
StringLiteral(""); rows.add(row(empty, empty, empty, empty, empty, FALSE_LITERAL)); return simpleQuery( selectList( aliasedName("name", "Name"), aliasedName("value", "Value"), aliasedName("default", "Default"), aliasedName("type", "Type"), aliasedName("description", "Description")), aliased( new Values(rows.build()), "session", ImmutableList.of("name", "value", "default", "type", "description", "include")), identifier("include")); } private Query parseView(String view, QualifiedObjectName name, Node node) { try { Statement statement = sqlParser.createStatement(view); return (Query) statement; } catch (ParsingException e) { throw new SemanticException(VIEW_PARSE_ERROR, node, "Failed parsing stored view '%s': %s", name, e.getMessage()); } } private static Relation from(String catalog, SchemaTableName table) { return table(QualifiedName.of(catalog, table.getSchemaName(), table.getTableName())); } @Override protected Node visitNode(Node node, Void context) { return node; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.request.json; import java.io.File; import java.util.ArrayList; import java.util.List; import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.client.solrj.response.json.BucketJsonFacet; import org.apache.solr.client.solrj.response.json.NestableJsonFacet; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.util.ExternalPaths; import org.junit.BeforeClass; import org.junit.Test; public class DirectJsonQueryRequestFacetingIntegrationTest extends SolrCloudTestCase { private static final String COLLECTION_NAME = "techproducts"; private static final String CONFIG_NAME = "techproducts_config"; private static final int NUM_TECHPRODUCTS_DOCS = 32; private static final int NUM_IN_STOCK = 17; private static final int NUM_ELECTRONICS = 12; private static final int NUM_CURRENCY = 4; private static final int NUM_MEMORY = 
3; private static final int NUM_CORSAIR = 3; private static final int NUM_BELKIN = 2; private static final int NUM_CANON = 2; @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) .addConfig(CONFIG_NAME, new File(ExternalPaths.TECHPRODUCTS_CONFIGSET).toPath()) .configure(); final List<String> solrUrls = new ArrayList<>(); solrUrls.add(cluster.getJettySolrRunner(0).getBaseUrl().toString()); CollectionAdminRequest.createCollection(COLLECTION_NAME, CONFIG_NAME, 1, 1) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update"); up.setParam("collection", COLLECTION_NAME); up.addFile(getFile("solrj/techproducts.xml"), "application/xml"); up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); UpdateResponse updateResponse = up.process(cluster.getSolrClient()); assertEquals(0, updateResponse.getStatus()); } @Test public void testSingleTermsFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new FacetBucket("memory", NUM_MEMORY)); } @Test public void testMultiTermsFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 
},", " 'top_manufacturers': {", " 'type': 'terms',", " 'field': 'manu_id_s',", " 'limit': 3", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new FacetBucket("memory", NUM_MEMORY)); assertHasFacetWithBucketValues( topLevelFacetData, "top_manufacturers", new FacetBucket("corsair", NUM_CORSAIR), new FacetBucket("belkin", NUM_BELKIN), new FacetBucket("canon", NUM_CANON)); } @Test public void testSingleRangeFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'prices': {", " 'type': 'range',", " 'field': 'price',", " 'start': 0,", " 'end': 100,", " 'gap': 20", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); assertHasFacetWithBucketValues( topLevelFacetData, "prices", new FacetBucket(0.0f, 5), new FacetBucket(20.0f, 0), new FacetBucket(40.0f, 0), new FacetBucket(60.0f, 1), new FacetBucket(80.0f, 1)); } @Test public void testMultiRangeFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'prices': {", " 'type': 'range',", " 'field': 'price',", " 'start': 0,", " 'end': 100,", " 'gap': 20", " },", " 
'shipping_weights': {", " 'type': 'range',", " 'field': 'weight',", " 'start': 0,", " 'end': 200,", " 'gap': 50", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount()); assertHasFacetWithBucketValues( topLevelFacetData, "prices", new FacetBucket(0.0f, 5), new FacetBucket(20.0f, 0), new FacetBucket(40.0f, 0), new FacetBucket(60.0f, 1), new FacetBucket(80.0f, 1)); assertHasFacetWithBucketValues( topLevelFacetData, "shipping_weights", new FacetBucket(0.0f, 6), new FacetBucket(50.0f, 0), new FacetBucket(100.0f, 0), new FacetBucket(150.0f, 1)); } @Test public void testSingleStatFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'sum_price': 'sum(price)'", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342); } @Test public void testMultiStatFacet() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'sum_price': 'sum(price)',", " 'avg_price': 'avg(price)'", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = 
response.getJsonFacetingResponse(); assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342); assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886); } @Test public void testMultiFacetsMixedTypes() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'avg_price': 'avg(price)',", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886); assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new FacetBucket("memory", NUM_MEMORY)); } @Test public void testNestedTermsFacet() throws Exception { final String subfacetName = "top_manufacturers_for_cat"; final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 'facet': {", " 'top_manufacturers_for_cat': {", " 'type': 'terms',", " 'field': 'manu_id_s',", " 'limit': 1", " }", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); // Test top level facets assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new 
FacetBucket("memory", NUM_MEMORY)); // Test subfacet values for each top-level facet bucket final List<BucketJsonFacet> catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets(); assertHasFacetWithBucketValues(catBuckets.get(0), subfacetName, new FacetBucket("corsair", 3)); assertHasFacetWithBucketValues(catBuckets.get(1), subfacetName, new FacetBucket("boa", 1)); assertHasFacetWithBucketValues(catBuckets.get(2), subfacetName, new FacetBucket("corsair", 3)); } @Test public void testNestedFacetsOfMixedTypes() throws Exception { final String subfacetName = "avg_price_for_cat"; final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 'facet': {", " 'avg_price_for_cat': 'avg(price)'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); // Test top level facets assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new FacetBucket("memory", NUM_MEMORY)); // Test subfacet values for each top-level facet bucket final List<BucketJsonFacet> catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets(); assertHasStatFacetWithValue(catBuckets.get(0), subfacetName, 252.02909261530095); // electronics assertHasStatFacetWithValue(catBuckets.get(1), subfacetName, 0.0); // currency assertHasStatFacetWithValue(catBuckets.get(2), subfacetName, 129.99499893188477); // memory } @Test public void testFacetWithDomainFilteredBySimpleQueryString() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_popular_cats': 
{", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 'domain': {", " 'filter': 'popularity:[5 TO 10]'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "top_popular_cats", new FacetBucket("electronics", 9), new FacetBucket("graphics card", 2), new FacetBucket("hard drive", 2)); } @Test public void testFacetWithDomainFilteredByLocalParamsQueryString() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'facet': {", " 'top_popular_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 'domain': {", " 'filter': '{!lucene df=\"popularity\" v=\"[5 TO 10]\"}'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "top_popular_cats", new FacetBucket("electronics", 9), new FacetBucket("graphics card", 2), new FacetBucket("hard drive", 2)); } @Test public void testFacetWithArbitraryDomainFromQueryString() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': 'cat:electronics',", " 'facet': {", " 'top_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 3", " 'domain': {", " 'query': '*:*'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); 
assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "top_cats", new FacetBucket("electronics", NUM_ELECTRONICS), new FacetBucket("currency", NUM_CURRENCY), new FacetBucket("memory", NUM_MEMORY)); } @Test public void testFacetWithArbitraryDomainFromLocalParamsQuery() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': 'cat:electronics',", " 'facet': {", " 'largest_search_cats': {", " 'type': 'terms',", " 'field': 'cat',", " 'domain': {", " 'query': '{!lucene df=\"cat\" v=\"search\"}'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "largest_search_cats", new FacetBucket("search", 2), new FacetBucket("software", 2)); } @Test public void testFacetWithMultipleSimpleQueryClausesInArbitraryDomain() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': 'cat:electronics',", " 'facet': {", " 'cats_matching_solr': {", " 'type': 'terms',", " 'field': 'cat',", " 'domain': {", " 'query': ['cat:search', 'name:Solr']", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "cats_matching_solr", new FacetBucket("search", 1), new FacetBucket("software", 1)); } @Test public void 
testFacetWithMultipleLocalParamsQueryClausesInArbitraryDomain() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': 'cat:electronics',", " 'facet': {", " 'cats_matching_solr': {", " 'type': 'terms',", " 'field': 'cat',", " 'domain': {", " 'query': ['{!lucene df=\"cat\" v=\"search\"}', '{!lucene df=\"name\" v=\"Solr\"}']", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "cats_matching_solr", new FacetBucket("search", 1), new FacetBucket("software", 1)); } @Test public void testFacetWithDomainWidenedUsingExcludeTagsToIgnoreFilters() throws Exception { final String jsonBody = String.join( "\n", "{", " 'query': '*:*',", " 'filter': {'#on_shelf': 'inStock:true'},", " 'facet': {", " 'in_stock_only': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 2", " }", " 'all': {", " 'type': 'terms',", " 'field': 'cat',", " 'limit': 2,", " 'domain': {", " 'excludeTags': 'on_shelf'", " }", " }", " }", "}"); final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody); QueryResponse response = request.process(cluster.getSolrClient(), COLLECTION_NAME); assertExpectedDocumentsFoundAndReturned(response, NUM_IN_STOCK, 10); final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse(); assertHasFacetWithBucketValues( topLevelFacetData, "in_stock_only", new FacetBucket("electronics", 8), new FacetBucket("currency", 4)); assertHasFacetWithBucketValues( topLevelFacetData, "all", new FacetBucket("electronics", 12), new FacetBucket("currency", 4)); } private class FacetBucket { private final Object val; private final int count; FacetBucket(Object val, int count) { this.val = val; this.count = 
count; } public Object getVal() { return val; } public int getCount() { return count; } } private void assertHasFacetWithBucketValues( NestableJsonFacet response, String expectedFacetName, FacetBucket... expectedBuckets) { assertTrue( "Expected response to have facet with name " + expectedFacetName, response.getBucketBasedFacets(expectedFacetName) != null); final List<BucketJsonFacet> buckets = response.getBucketBasedFacets(expectedFacetName).getBuckets(); assertEquals(expectedBuckets.length, buckets.size()); for (int i = 0; i < expectedBuckets.length; i++) { final FacetBucket expectedBucket = expectedBuckets[i]; final BucketJsonFacet actualBucket = buckets.get(i); assertEquals(expectedBucket.getVal(), actualBucket.getVal()); assertEquals(expectedBucket.getCount(), actualBucket.getCount()); } } private void assertHasStatFacetWithValue( NestableJsonFacet response, String expectedFacetName, Double expectedStatValue) { assertTrue( "Expected response to have stat facet named '" + expectedFacetName + "'", response.getStatValue(expectedFacetName) != null); assertEquals(expectedStatValue, response.getStatValue(expectedFacetName)); } private void assertExpectedDocumentsFoundAndReturned( QueryResponse response, int expectedNumFound, int expectedReturned) { assertEquals(0, response.getStatus()); final SolrDocumentList documents = response.getResults(); assertEquals(expectedNumFound, documents.getNumFound()); assertEquals(expectedReturned, documents.size()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tajo.engine.planner;

import org.apache.tajo.catalog.Schema;
import org.apache.tajo.catalog.SortSpec;
import org.apache.tajo.common.TajoDataTypes.Type;
import org.apache.tajo.datum.DatumFactory;
import org.apache.tajo.storage.Tuple;
import org.apache.tajo.storage.TupleRange;
import org.apache.tajo.storage.VTuple;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@code UniformRangePartition}: verifies tuple-range
 * cardinality computation, multi-column increment with digit overflow
 * (carry into higher-order sort columns), and range partitioning across
 * TEXT, INT8, FLOAT8 and INET4 column types, including null boundaries.
 */
public class TestUniformRangePartition {
  /**
   * It verify overflow and increment.
   */
  @Test
  public void testIncrement1() {
    // Two TEXT sort columns: range [A,A] .. [D,C] => 4 x 3 = 12 tuples.
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT);
    SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createText("A"));
    s.put(1, DatumFactory.createText("A"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createText("D"));
    e.put(1, DatumFactory.createText("C"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(12, partitioner.getTotalCardinality().intValue());

    // Expected enumeration order of the 12 tuples, least-significant
    // (rightmost) column incremented first.
    String [] result = new String[12];
    result[0] = "AA";
    result[1] = "AB";
    result[2] = "AC";
    result[3] = "BA";
    result[4] = "BB";
    result[5] = "BC";
    result[6] = "CA";
    result[7] = "CB";
    result[8] = "CC";
    result[9] = "DA";
    result[10] = "DB";
    result[11] = "DC";

    // First single step: AA -> AB.
    Tuple end = partitioner.increment(s, 1, 1);
    assertEquals("A", end.get(0).asChars());
    assertEquals("B", end.get(1).asChars());
    // Step through the remaining values one at a time.
    // NOTE(review): the loop stops at i == 10, so the final value "DC"
    // (result[11]) is never verified -- confirm whether that is intentional
    // (e.g. to avoid incrementing past the range end).
    for (int i = 2; i < 11; i++ ) {
      end = partitioner.increment(end, 1, 1);
      assertEquals(result[i].charAt(0), end.get(0).asChars().charAt(0));
      assertEquals(result[i].charAt(1), end.get(1).asChars().charAt(0));
    }
  }

  /**
   * It verify overflow with the number that exceeds the last digit.
   */
  @Test
  public void testIncrement2() {
    // Same 12-tuple TEXT range as testIncrement1; here a single increment
    // step is larger than one "digit", forcing a carry into column 0.
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createText("A"));
    s.put(1, DatumFactory.createText("A"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createText("D"));
    e.put(1, DatumFactory.createText("C"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(12, partitioner.getTotalCardinality().intValue());

    String [] result = new String[12];
    result[0] = "AA";
    result[1] = "AB";
    result[2] = "AC";
    result[3] = "BA";
    result[4] = "BB";
    result[5] = "BC";
    result[6] = "CA";
    result[7] = "CB";
    result[8] = "CC";
    result[9] = "DA";
    result[10] = "DB";
    result[11] = "DC";

    // AA + 6 = CA (skips two full cycles of the last column).
    Tuple end = partitioner.increment(s, 6, 1);
    assertEquals("C", end.get(0).asChars());
    assertEquals("A", end.get(1).asChars());
    // CA + 5 = DC (the very last tuple in the range).
    end = partitioner.increment(end, 5, 1);
    assertEquals("D", end.get(0).asChars());
    assertEquals("C", end.get(1).asChars());
  }

  /**
   * It verify the case where two or more digits are overflow.
   */
  @Test
  public void testIncrement3() {
    // Three TEXT columns; cardinalities per column noted inline below.
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT)
        .addColumn("final", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(3);
    s.put(0, DatumFactory.createText("A"));
    s.put(1, DatumFactory.createText("A"));
    s.put(2, DatumFactory.createText("A"));
    Tuple e = new VTuple(3);
    e.put(0, DatumFactory.createText("D")); // 4
    e.put(1, DatumFactory.createText("B")); // 2
    e.put(2, DatumFactory.createText("C")); // x3 = 24
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(24, partitioner.getTotalCardinality().intValue());

    // AAA + 5 (on the last column) = ABC -- the last tuple before a
    // multi-column carry is needed.
    Tuple overflowBefore = partitioner.increment(s, 5, 2);
    assertEquals("A", overflowBefore.get(0).asChars());
    assertEquals("B", overflowBefore.get(1).asChars());
    assertEquals("C", overflowBefore.get(2).asChars());

    // One more step overflows columns 2 and 1 simultaneously: ABC -> BAA.
    Tuple overflowed = partitioner.increment(overflowBefore, 1, 2);
    assertEquals("B", overflowed.get(0).asChars());
    assertEquals("A", overflowed.get(1).asChars());
    assertEquals("A", overflowed.get(2).asChars());
  }

  /** Increment over two INT8 columns: 10x20 grid, 200 tuples total. */
  @Test
  public void testIncrement4() {
    Schema schema = new Schema()
        .addColumn("l_orderkey", Type.INT8)
        .addColumn("l_linenumber", Type.INT8);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createInt8(10));
    s.put(1, DatumFactory.createInt8(20));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createInt8(19));
    e.put(1, DatumFactory.createInt8(39));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(200, partitioner.getTotalCardinality().longValue());

    // (10,20) + 100 = (15,20): exactly five full 20-wide cycles.
    Tuple range2 = partitioner.increment(s, 100, 1);
    assertEquals(15, range2.get(0).asInt4());
    assertEquals(20, range2.get(1).asInt4());
    // (15,20) + 99 = (19,39): the last tuple of the range.
    Tuple range3 = partitioner.increment(range2, 99, 1);
    assertEquals(19, range3.get(0).asInt4());
    assertEquals(39, range3.get(1).asInt4());
  }

  /** Same 4x2x3 overflow scenario as testIncrement3, but with INT8 columns. */
  @Test
  public void testIncrement5() {
    Schema schema = new Schema()
        .addColumn("l_orderkey", Type.INT8)
        .addColumn("l_linenumber", Type.INT8)
        .addColumn("final", Type.INT8);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(3);
    s.put(0, DatumFactory.createInt8(1));
    s.put(1, DatumFactory.createInt8(1));
    s.put(2, DatumFactory.createInt8(1));
    Tuple e = new VTuple(3);
    e.put(0, DatumFactory.createInt8(4)); // 4
    e.put(1, DatumFactory.createInt8(2)); // 2
    e.put(2, DatumFactory.createInt8(3)); //x3 = 24
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(24, partitioner.getTotalCardinality().longValue());

    // (1,1,1) + 5 = (1,2,3): last tuple before the multi-column carry.
    Tuple beforeOverflow = partitioner.increment(s, 5, 2);
    assertEquals(1, beforeOverflow.get(0).asInt8());
    assertEquals(2, beforeOverflow.get(1).asInt8());
    assertEquals(3, beforeOverflow.get(2).asInt8());
    // One more step carries through columns 2 and 1: (1,2,3) -> (2,1,1).
    Tuple overflow = partitioner.increment(beforeOverflow, 1, 2);
    assertEquals(2, overflow.get(0).asInt8());
    assertEquals(1, overflow.get(1).asInt8());
    assertEquals(1, overflow.get(2).asInt8());
  }

  /** Same overflow scenario with FLOAT8 columns (unit-sized steps). */
  @Test
  public void testIncrement6() {
    Schema schema = new Schema()
        .addColumn("l_orderkey", Type.FLOAT8)
        .addColumn("l_linenumber", Type.FLOAT8)
        .addColumn("final", Type.FLOAT8);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(3);
    s.put(0, DatumFactory.createFloat8(1.1d));
    s.put(1, DatumFactory.createFloat8(1.1d));
    s.put(2, DatumFactory.createFloat8(1.1d));
    Tuple e = new VTuple(3);
    e.put(0, DatumFactory.createFloat8(4.1d)); // 4
    e.put(1, DatumFactory.createFloat8(2.1d)); // 2
    e.put(2, DatumFactory.createFloat8(3.1d)); //x3 = 24
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(24, partitioner.getTotalCardinality().longValue());

    // Exact double equality is intentional here: the partitioner is expected
    // to step by exactly 1.0, which is representable without rounding error
    // for these values.
    Tuple beforeOverflow = partitioner.increment(s, 5, 2);
    assertTrue(1.1d == beforeOverflow.get(0).asFloat8());
    assertTrue(2.1d == beforeOverflow.get(1).asFloat8());
    assertTrue(3.1d == beforeOverflow.get(2).asFloat8());
    Tuple overflow = partitioner.increment(beforeOverflow, 1, 2);
    assertTrue(2.1d == overflow.get(0).asFloat8());
    assertTrue(1.1d == overflow.get(1).asFloat8());
    assertTrue(1.1d == overflow.get(2).asFloat8());
  }

  /** Same overflow scenario with INET4 (IPv4 address) columns. */
  @Test
  public void testIncrement7() {
    Schema schema = new Schema()
        .addColumn("l_orderkey", Type.INET4)
        .addColumn("l_linenumber", Type.INET4)
        .addColumn("final", Type.INET4);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(3);
    s.put(0, DatumFactory.createInet4("127.0.1.1"));
    s.put(1, DatumFactory.createInet4("127.0.0.1"));
    s.put(2, DatumFactory.createInet4("128.0.0.253"));
    Tuple e = new VTuple(3);
    e.put(0, DatumFactory.createInet4("127.0.1.4")); // 4
    e.put(1, DatumFactory.createInet4("127.0.0.2")); // 2
    e.put(2, DatumFactory.createInet4("128.0.0.255")); //x3 = 24
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
    assertEquals(24, partitioner.getTotalCardinality().longValue());

    Tuple beforeOverflow = partitioner.increment(s, 5, 2);
    assertTrue("127.0.1.1".equals(beforeOverflow.get(0).asChars()));
    assertTrue("127.0.0.2".equals(beforeOverflow.get(1).asChars()));
    assertTrue("128.0.0.255".equals(beforeOverflow.get(2).asChars()));
    // Carry wraps the last two address columns back to their range starts.
    Tuple overflow = partitioner.increment(beforeOverflow, 1, 2);
    assertTrue("127.0.1.2".equals(overflow.get(0).asChars()));
    assertTrue("127.0.0.1".equals(overflow.get(1).asChars()));
    assertTrue("128.0.0.253".equals(overflow.get(2).asChars()));
  }

  /** Partitioning into 31 ranges must yield strictly ascending ranges. */
  @Test
  public void testPartition() {
    Schema schema = new Schema();
    schema.addColumn("l_returnflag", Type.TEXT);
    schema.addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createText("A"));
    s.put(1, DatumFactory.createText("F"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createText("R"));
    e.put(1, DatumFactory.createText("O"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(31);

    // Every produced range must be strictly greater than its predecessor.
    TupleRange prev = null;
    for (TupleRange r : ranges) {
      if (prev == null) {
        prev = r;
      } else {
        assertTrue(prev.compareTo(r) < 0);
      }
    }
  }

  /** A single-partition request must return the input range unchanged. */
  @Test
  public void testPartitionForOnePartNum() {
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createText("A"));
    s.put(1, DatumFactory.createText("F"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createText("R"));
    e.put(1, DatumFactory.createText("O"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(1);
    assertEquals(expected, ranges[0]);
  }

  /** Single partition with one null boundary value on each end tuple. */
  @Test
  public void testPartitionForOnePartNumWithOneOfTheValueNull() {
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createNullDatum());
    s.put(1, DatumFactory.createText("F"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createText("R"));
    e.put(1, DatumFactory.createNullDatum());
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(1);
    assertEquals(expected, ranges[0]);
  }

  /** Single partition where every boundary value is null. */
  @Test
  public void testPartitionForOnePartNumWithBothValueNull() {
    Schema schema = new Schema()
        .addColumn("l_returnflag", Type.TEXT)
        .addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createNullDatum());
    s.put(1, DatumFactory.createNullDatum());
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createNullDatum());
    e.put(1, DatumFactory.createNullDatum());
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(1);
    assertEquals(expected, ranges[0]);
  }

  /** Multi-range partitioning when the leading column is null on both ends. */
  @Test
  public void testPartitionWithNull() {
    Schema schema = new Schema();
    schema.addColumn("l_returnflag", Type.TEXT);
    schema.addColumn("l_linestatus", Type.TEXT);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createNullDatum());
    s.put(1, DatumFactory.createText("F"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createNullDatum());
    e.put(1, DatumFactory.createText("O"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(10);

    // NOTE(review): this asserts DESCENDING order (compareTo > 0) while every
    // other partition test in this class asserts ascending (< 0). Possibly
    // this reflects how TupleRange.compareTo treats a null leading column, or
    // it may be an inverted comparison -- verify against TupleRange semantics.
    TupleRange prev = null;
    for (TupleRange r : ranges) {
      if (prev == null) {
        prev = r;
      } else {
        assertTrue(prev.compareTo(r) > 0);
      }
    }
  }

  /** Multi-range partitioning over INET4 columns; ranges must ascend. */
  @Test
  public void testPartitionWithINET4() {
    Schema schema = new Schema();
    schema.addColumn("l_returnflag", Type.INET4);
    schema.addColumn("l_linestatus", Type.INET4);
    SortSpec [] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

    Tuple s = new VTuple(2);
    s.put(0, DatumFactory.createInet4("127.0.1.10"));
    s.put(1, DatumFactory.createInet4("127.0.2.10"));
    Tuple e = new VTuple(2);
    e.put(0, DatumFactory.createInet4("127.0.1.20"));
    e.put(1, DatumFactory.createInet4("127.0.2.20"));
    TupleRange expected = new TupleRange(sortSpecs, s, e);

    RangePartitionAlgorithm partitioner = new UniformRangePartition(expected, sortSpecs, true);
    TupleRange [] ranges = partitioner.partition(10);

    TupleRange prev = null;
    for (TupleRange r : ranges) {
      if (prev == null) {
        prev = r;
      } else {
        assertTrue(prev.compareTo(r) < 0);
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.gpu;

import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.exceptions.ResourceNotFoundException;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.gpu.AssignedGpuDevice;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.gpu.GpuDevice;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Collectors;

import static org.apache.hadoop.yarn.api.records.ResourceInformation.GPU_URI;

/**
 * Allocate GPU resources according to requirements.
 */
public class GpuResourceAllocator {
  final static Logger LOG = LoggerFactory.
      getLogger(GpuResourceAllocator.class);

  // Sleep interval between retries while waiting for GPUs to be released.
  private static final int WAIT_MS_PER_LOOP = 1000;

  // Full set of GPU devices this NM is allowed to hand out (sorted).
  private Set<GpuDevice> allowedGpuDevices = new TreeSet<>();
  // Device -> container currently holding it. Guarded by "this": all methods
  // touching allowedGpuDevices/usedDevices are synchronized.
  private Map<GpuDevice, ContainerId> usedDevices = new TreeMap<>();
  private Context nmContext;
  // Upper bound (ms) to wait for releasing GPUs before failing an assignment.
  private final int waitPeriodForResource;

  public GpuResourceAllocator(Context ctx) {
    this.nmContext = ctx;
    // Wait for a maximum of 120 seconds if no available GPU are there which
    // are yet to be released.
    this.waitPeriodForResource = 120 * WAIT_MS_PER_LOOP;
  }

  @VisibleForTesting
  GpuResourceAllocator(Context ctx, int waitPeriodForResource) {
    this.nmContext = ctx;
    this.waitPeriodForResource = waitPeriodForResource;
  }

  /**
   * Contains allowed and denied devices.
   * Denied devices will be useful for cgroups devices module to do
   * blacklisting.
   */
  static class GpuAllocation {
    private Set<GpuDevice> allowed = Collections.emptySet();
    private Set<GpuDevice> denied = Collections.emptySet();

    GpuAllocation(Set<GpuDevice> allowed, Set<GpuDevice> denied) {
      // Null inputs keep the empty-set defaults; non-null inputs are
      // defensively copied into immutable sets.
      if (allowed != null) {
        this.allowed = ImmutableSet.copyOf(allowed);
      }
      if (denied != null) {
        this.denied = ImmutableSet.copyOf(denied);
      }
    }

    public Set<GpuDevice> getAllowedGPUs() {
      return allowed;
    }

    public Set<GpuDevice> getDeniedGPUs() {
      return denied;
    }
  }

  /**
   * Add GPU to the allowed list of GPUs.
   * @param gpuDevice gpu device
   */
  public synchronized void addGpu(GpuDevice gpuDevice) {
    allowedGpuDevices.add(gpuDevice);
  }

  /** @return number of allowed GPUs not currently assigned to a container. */
  @VisibleForTesting
  public synchronized int getAvailableGpus() {
    return allowedGpuDevices.size() - usedDevices.size();
  }

  /**
   * Re-populates the device->container mapping from the container's persisted
   * resource mappings after an NM restart. Fails if a recovered device is not
   * in the allowed list or is already held by another container.
   *
   * @param containerId container whose GPU assignments are being recovered
   * @throws ResourceHandlerException on any inconsistent recovery state
   */
  public synchronized void recoverAssignedGpus(ContainerId containerId)
      throws ResourceHandlerException {
    Container c = nmContext.getContainers().get(containerId);
    if (c == null) {
      throw new ResourceHandlerException(
          "Cannot find container with id=" + containerId
              + ", this should not occur under normal circumstances!");
    }

    LOG.info("Starting recovery of GpuDevice for {}.", containerId);
    for (Serializable gpuDeviceSerializable : c.getResourceMappings()
        .getAssignedResources(GPU_URI)) {
      if (!(gpuDeviceSerializable instanceof GpuDevice)) {
        throw new ResourceHandlerException(
            "Trying to recover device id, however it"
                + " is not an instance of " + GpuDevice.class.getName()
                + ", this should not occur under normal circumstances!");
      }

      GpuDevice gpuDevice = (GpuDevice) gpuDeviceSerializable;

      // Make sure it is in allowed GPU device.
      if (!allowedGpuDevices.contains(gpuDevice)) {
        throw new ResourceHandlerException(
            "Try to recover device = " + gpuDevice
                + " however it is not in the allowed device list:"
                + StringUtils.join(",", allowedGpuDevices));
      }

      // Make sure it is not occupied by anybody else
      if (usedDevices.containsKey(gpuDevice)) {
        throw new ResourceHandlerException(
            "Try to recover device id = " + gpuDevice
                + " however it is already assigned to container="
                + usedDevices.get(gpuDevice)
                + ", please double check what happened.");
      }

      usedDevices.put(gpuDevice, containerId);
      LOG.info("ContainerId {} is assigned to GpuDevice {} on recovery.",
          containerId, gpuDevice);
    }
    LOG.info("Finished recovery of GpuDevice for {}.", containerId);
  }

  /**
   * Get number of requested GPUs from resource.
   * @param requestedResource requested resource
   * @return #gpus, or 0 when the resource carries no GPU entry.
   */
  public static int getRequestedGpus(Resource requestedResource) {
    try {
      return Long.valueOf(requestedResource.getResourceValue(
          GPU_URI)).intValue();
    } catch (ResourceNotFoundException e) {
      // No GPU resource type on this request -- treat as zero GPUs.
      return 0;
    }
  }

  /**
   * Assign GPU to the specified container.
   *
   * Retries once per second (up to {@code waitPeriodForResource} ms) while
   * {@code internalAssignGpus} signals (by returning null) that enough GPUs
   * will become free once finishing containers release theirs.
   *
   * @param container container to allocate
   * @return allocation results.
   * @throws ResourceHandlerException When failed to assign GPUs.
   */
  public GpuAllocation assignGpus(Container container)
      throws ResourceHandlerException {
    GpuAllocation allocation = internalAssignGpus(container);

    // Wait for a maximum of waitPeriodForResource seconds if no
    // available GPU are there which are yet to be released.
    int timeWaiting = 0;
    while (allocation == null) {
      if (timeWaiting >= waitPeriodForResource) {
        break;
      }

      // Sleep for 1 sec to ensure there are some free GPU devices which are
      // getting released.
      try {
        LOG.info("Container : " + container.getContainerId()
            + " is waiting for free GPU devices.");
        Thread.sleep(WAIT_MS_PER_LOOP);
        timeWaiting += WAIT_MS_PER_LOOP;
        allocation = internalAssignGpus(container);
      } catch (InterruptedException e) {
        // On any interrupt, break the loop and continue execution.
        Thread.currentThread().interrupt();
        LOG.warn("Interrupted while waiting for available GPU");
        break;
      }
    }

    if(allocation == null) {
      String message = "Could not get valid GPU device for container '"
          + container.getContainerId()
          + "' as some other containers might not releasing GPUs.";
      LOG.warn(message);
      throw new ResourceHandlerException(message);
    }
    return allocation;
  }

  /**
   * Single synchronized assignment attempt.
   *
   * @return a GpuAllocation on success (allowed = assigned devices, denied =
   *         remaining allowed devices for cgroups blacklisting); null when
   *         not enough GPUs are free now but enough are held by containers in
   *         final states (caller should retry); never returns null when the
   *         request needs no GPUs.
   * @throws ResourceHandlerException when the request can never be satisfied,
   *         or the state store update fails (assignment is rolled back).
   */
  private synchronized GpuAllocation internalAssignGpus(Container container)
      throws ResourceHandlerException {
    Resource requestedResource = container.getResource();
    ContainerId containerId = container.getContainerId();
    int numRequestedGpuDevices = getRequestedGpus(requestedResource);

    // Assign GPUs to container if requested some.
    if (numRequestedGpuDevices > 0) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("Trying to assign %d GPUs to container: %s"
                + ", #AvailableGPUs=%d, #ReleasingGPUs=%d",
            numRequestedGpuDevices, containerId, getAvailableGpus(),
            getReleasingGpus()));
      }

      if (numRequestedGpuDevices > getAvailableGpus()) {
        // If there are some devices which are getting released, wait for few
        // seconds to get it.
        if (numRequestedGpuDevices <= getReleasingGpus() + getAvailableGpus()) {
          return null;
        }
      }

      // Not enough free GPUs even counting releasing ones: hard failure.
      if (numRequestedGpuDevices > getAvailableGpus()) {
        throw new ResourceHandlerException(
            "Failed to find enough GPUs, requestor=" + containerId
                + ", #RequestedGPUs=" + numRequestedGpuDevices
                + ", #AvailableGPUs=" + getAvailableGpus());
      }

      // Greedily take the first free devices in sorted order.
      Set<GpuDevice> assignedGpus = new TreeSet<>();
      for (GpuDevice gpu : allowedGpuDevices) {
        if (!usedDevices.containsKey(gpu)) {
          usedDevices.put(gpu, containerId);
          assignedGpus.add(gpu);
          if (assignedGpus.size() == numRequestedGpuDevices) {
            break;
          }
        }
      }

      // Record in state store if we allocated anything
      if (!assignedGpus.isEmpty()) {
        try {
          // Update state store.
          nmContext.getNMStateStore().storeAssignedResources(container,
              GPU_URI, new ArrayList<>(assignedGpus));
        } catch (IOException e) {
          // Persisting failed: roll back the in-memory assignment before
          // propagating the error.
          unassignGpus(containerId);
          throw new ResourceHandlerException(e);
        }
      }

      return new GpuAllocation(assignedGpus,
          Sets.differenceInTreeSets(allowedGpuDevices, assignedGpus));
    }
    // Zero GPUs requested: deny access to every allowed device.
    return new GpuAllocation(null, allowedGpuDevices);
  }

  /**
   * @return total GPU count held by containers already in a final state,
   *         i.e. devices expected to be released shortly.
   */
  private synchronized long getReleasingGpus() {
    long releasingGpus = 0;
    // Copy to a set first: a container holding several GPUs appears several
    // times in usedDevices.values() and must be counted once.
    for (ContainerId containerId : ImmutableSet.copyOf(usedDevices.values())) {
      Container container;
      if ((container = nmContext.getContainers().get(containerId)) != null) {
        if (container.isContainerInFinalStates()) {
          releasingGpus = releasingGpus + container.getResource()
              .getResourceInformation(ResourceInformation.GPU_URI).getValue();
        }
      }
    }
    return releasingGpus;
  }

  /**
   * Clean up all GPUs assigned to containerId.
   * @param containerId containerId
   */
  public synchronized void unassignGpus(ContainerId containerId) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Trying to unassign GPU device from container "
          + containerId);
    }
    usedDevices.entrySet().removeIf(entry ->
        entry.getValue().equals(containerId));
  }

  /** @return immutable snapshot of the device -> container mapping. */
  @VisibleForTesting
  public synchronized Map<GpuDevice, ContainerId> getDeviceAllocationMapping() {
    return ImmutableMap.copyOf(usedDevices);
  }

  /** @return immutable snapshot of all allowed GPU devices. */
  public synchronized List<GpuDevice> getAllowedGpus() {
    return ImmutableList.copyOf(allowedGpuDevices);
  }

  /** @return one AssignedGpuDevice per currently assigned device. */
  public synchronized List<AssignedGpuDevice> getAssignedGpus() {
    return usedDevices.entrySet().stream()
        .map(e -> {
          final GpuDevice gpu = e.getKey();
          ContainerId containerId = e.getValue();
          return new AssignedGpuDevice(gpu.getIndex(), gpu.getMinorNumber(),
              containerId);
        }).collect(Collectors.toList());
  }

  @Override
  public String toString() {
    return GpuResourceAllocator.class.getName();
  }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.forecast.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The status, start time, and end time of a backtest, as well as a failure reason if applicable. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/TestWindowSummary" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class TestWindowSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * The time at which the test began. * </p> */ private java.util.Date testWindowStart; /** * <p> * The time at which the test ended. * </p> */ private java.util.Date testWindowEnd; /** * <p> * The status of the test. Possible status values are: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> * </ul> */ private String status; /** * <p> * If the test failed, the reason why it failed. * </p> */ private String message; /** * <p> * The time at which the test began. * </p> * * @param testWindowStart * The time at which the test began. 
*/ public void setTestWindowStart(java.util.Date testWindowStart) { this.testWindowStart = testWindowStart; } /** * <p> * The time at which the test began. * </p> * * @return The time at which the test began. */ public java.util.Date getTestWindowStart() { return this.testWindowStart; } /** * <p> * The time at which the test began. * </p> * * @param testWindowStart * The time at which the test began. * @return Returns a reference to this object so that method calls can be chained together. */ public TestWindowSummary withTestWindowStart(java.util.Date testWindowStart) { setTestWindowStart(testWindowStart); return this; } /** * <p> * The time at which the test ended. * </p> * * @param testWindowEnd * The time at which the test ended. */ public void setTestWindowEnd(java.util.Date testWindowEnd) { this.testWindowEnd = testWindowEnd; } /** * <p> * The time at which the test ended. * </p> * * @return The time at which the test ended. */ public java.util.Date getTestWindowEnd() { return this.testWindowEnd; } /** * <p> * The time at which the test ended. * </p> * * @param testWindowEnd * The time at which the test ended. * @return Returns a reference to this object so that method calls can be chained together. */ public TestWindowSummary withTestWindowEnd(java.util.Date testWindowEnd) { setTestWindowEnd(testWindowEnd); return this; } /** * <p> * The status of the test. Possible status values are: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> * </ul> * * @param status * The status of the test. Possible status values are:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the test. 
Possible status values are: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> * </ul> * * @return The status of the test. Possible status values are:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> */ public String getStatus() { return this.status; } /** * <p> * The status of the test. Possible status values are: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> * </ul> * * @param status * The status of the test. Possible status values are:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> * </p> * </li> * <li> * <p> * <code>CREATE_FAILED</code> * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. */ public TestWindowSummary withStatus(String status) { setStatus(status); return this; } /** * <p> * If the test failed, the reason why it failed. * </p> * * @param message * If the test failed, the reason why it failed. */ public void setMessage(String message) { this.message = message; } /** * <p> * If the test failed, the reason why it failed. * </p> * * @return If the test failed, the reason why it failed. */ public String getMessage() { return this.message; } /** * <p> * If the test failed, the reason why it failed. * </p> * * @param message * If the test failed, the reason why it failed. * @return Returns a reference to this object so that method calls can be chained together. */ public TestWindowSummary withMessage(String message) { setMessage(message); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. 
Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getTestWindowStart() != null) sb.append("TestWindowStart: ").append(getTestWindowStart()).append(","); if (getTestWindowEnd() != null) sb.append("TestWindowEnd: ").append(getTestWindowEnd()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getMessage() != null) sb.append("Message: ").append(getMessage()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof TestWindowSummary == false) return false; TestWindowSummary other = (TestWindowSummary) obj; if (other.getTestWindowStart() == null ^ this.getTestWindowStart() == null) return false; if (other.getTestWindowStart() != null && other.getTestWindowStart().equals(this.getTestWindowStart()) == false) return false; if (other.getTestWindowEnd() == null ^ this.getTestWindowEnd() == null) return false; if (other.getTestWindowEnd() != null && other.getTestWindowEnd().equals(this.getTestWindowEnd()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getMessage() == null ^ this.getMessage() == null) return false; if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getTestWindowStart() == null) ? 0 : getTestWindowStart().hashCode()); hashCode = prime * hashCode + ((getTestWindowEnd() == null) ? 0 : getTestWindowEnd().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 
0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode()); return hashCode; } @Override public TestWindowSummary clone() { try { return (TestWindowSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.forecast.model.transform.TestWindowSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package de.undercouch.citeproc.csl.internal.rendering;

import de.undercouch.citeproc.csl.internal.RenderContext;
import de.undercouch.citeproc.csl.internal.behavior.Behavior;
import de.undercouch.citeproc.helper.NodeHelper;
import org.w3c.dom.Node;

import java.util.Objects;
import java.util.function.Consumer;

/**
 * Attributes for a name element that can also be inherited from a style,
 * citation, or bibliography element.
 * @author Michel Kraemer
 */
public class SNameInheritableAttributes implements Behavior {
    // Defaults per attribute. A field equal to its default is treated by
    // merge() as "not explicitly set" and therefore never overrides an
    // inherited value (see the NOTE on merge()).
    private static final String DEFAULT_AND = null;
    private static final String DEFAULT_DELIMITER_PRECEDES_ET_AL = "contextual";
    private static final String DEFAULT_DELIMITER_PRECEDES_LAST = "contextual";
    private static final boolean DEFAULT_INITIALIZE = true;
    private static final String DEFAULT_INITIALIZE_WITH = null;
    private static final String DEFAULT_NAME_AS_SORT_ORDER = null;
    private static final String DEFAULT_SORT_SEPARATOR = ", ";
    private static final Integer DEFAULT_ET_AL_MIN = null;
    private static final Integer DEFAULT_ET_AL_USE_FIRST = null;

    private final String and;
    private final String delimiterPrecedesEtAl;
    private final String delimiterPrecedesLast;
    private final boolean initialize;
    private final String initializeWith;
    private final String nameAsSortOrder;
    private final String sortSeparator;
    private final Integer etAlMin;
    private final Integer etAlUseFirst;
    // private final Integer etAlUseLast;
    // private final Integer etAlUseSubsequentMin;
    // private final Integer etAlUseSubsequentUseFirst;

    // Cached flag: true if at least one attribute differs from its default.
    // Used by accept() to skip creating a new RenderContext when this object
    // would contribute nothing.
    private final boolean hasInheritableAttributes;

    /**
     * Reads the inheritable name attributes from the given DOM node.
     * A {@code null} node yields an instance with all defaults.
     * NOTE(review): numeric attributes are parsed with Integer.parseInt, so a
     * non-numeric "et-al-min"/"et-al-use-first" throws NumberFormatException.
     */
    public SNameInheritableAttributes(Node node) {
        String and;
        boolean initialize;
        String initializeWith;
        String nameAsSortOrder;
        String delimiterPrecedesEtAl;
        String delimiterPrecedesLast;
        String sortSeparator;
        String strEtAlMin;
        String strEtAlUseFirst;
        if (node != null) {
            and = NodeHelper.getAttrValue(node, "and");
            String strInitialize = NodeHelper.getAttrValue(node, "initialize");
            // Absent attribute falls back to the default (true); any present
            // value goes through Boolean.parseBoolean.
            initialize = strInitialize == null ? DEFAULT_INITIALIZE : Boolean.parseBoolean(strInitialize);
            initializeWith = NodeHelper.getAttrValue(node, "initialize-with");
            nameAsSortOrder = NodeHelper.getAttrValue(node, "name-as-sort-order");
            delimiterPrecedesEtAl = NodeHelper.getAttrValue(node, "delimiter-precedes-et-al");
            delimiterPrecedesLast = NodeHelper.getAttrValue(node, "delimiter-precedes-last");
            sortSeparator = NodeHelper.getAttrValue(node, "sort-separator");
            strEtAlMin = NodeHelper.getAttrValue(node, "et-al-min");
            strEtAlUseFirst = NodeHelper.getAttrValue(node, "et-al-use-first");
        } else {
            and = DEFAULT_AND;
            initialize = DEFAULT_INITIALIZE;
            initializeWith = DEFAULT_INITIALIZE_WITH;
            nameAsSortOrder = DEFAULT_NAME_AS_SORT_ORDER;
            // Deliberately null here; the shared defaulting below fills in the
            // non-null defaults for both the null-node and missing-attribute cases.
            delimiterPrecedesEtAl = null;
            delimiterPrecedesLast = null;
            sortSeparator = null;
            strEtAlMin = null;
            strEtAlUseFirst = null;
        }

        // Apply non-null defaults regardless of which branch ran above
        if (delimiterPrecedesEtAl == null) {
            delimiterPrecedesEtAl = DEFAULT_DELIMITER_PRECEDES_ET_AL;
        }
        if (delimiterPrecedesLast == null) {
            delimiterPrecedesLast = DEFAULT_DELIMITER_PRECEDES_LAST;
        }
        if (sortSeparator == null) {
            sortSeparator = DEFAULT_SORT_SEPARATOR;
        }

        Integer etAlMin;
        if (strEtAlMin != null) {
            etAlMin = Integer.parseInt(strEtAlMin);
        } else {
            etAlMin = DEFAULT_ET_AL_MIN;
        }
        Integer etAlUseFirst;
        if (strEtAlUseFirst != null) {
            etAlUseFirst = Integer.parseInt(strEtAlUseFirst);
        } else {
            etAlUseFirst = DEFAULT_ET_AL_USE_FIRST;
        }

        this.and = and;
        this.delimiterPrecedesEtAl = delimiterPrecedesEtAl;
        this.delimiterPrecedesLast = delimiterPrecedesLast;
        this.initialize = initialize;
        this.initializeWith = initializeWith;
        this.nameAsSortOrder = nameAsSortOrder;
        this.sortSeparator = sortSeparator;
        this.etAlMin = etAlMin;
        this.etAlUseFirst = etAlUseFirst;
        hasInheritableAttributes = determineHasInheritableAttributes(this);
    }

    /**
     * Creates an instance with explicit attribute values. Used by
     * {@link #merge(SNameInheritableAttributes)} to build the combined result.
     */
    public SNameInheritableAttributes(String and, String delimiterPrecedesEtAl,
            String delimiterPrecedesLast, boolean initialize, String initializeWith,
            String nameAsSortOrder, String sortSeparator, Integer etAlMin,
            Integer etAlUseFirst) {
        this.and = and;
        this.delimiterPrecedesEtAl = delimiterPrecedesEtAl;
        this.delimiterPrecedesLast = delimiterPrecedesLast;
        this.initialize = initialize;
        this.initializeWith = initializeWith;
        this.nameAsSortOrder = nameAsSortOrder;
        this.sortSeparator = sortSeparator;
        this.etAlMin = etAlMin;
        this.etAlUseFirst = etAlUseFirst;
        hasInheritableAttributes = determineHasInheritableAttributes(this);
    }

    /**
     * Returns {@code true} if any attribute of {@code nia} differs from its
     * default, i.e. there is something worth inheriting.
     */
    private static boolean determineHasInheritableAttributes(SNameInheritableAttributes nia) {
        return !Objects.equals(nia.and, DEFAULT_AND) ||
                nia.initialize != DEFAULT_INITIALIZE ||
                !Objects.equals(nia.initializeWith, DEFAULT_INITIALIZE_WITH) ||
                !Objects.equals(nia.nameAsSortOrder, DEFAULT_NAME_AS_SORT_ORDER) ||
                !Objects.equals(nia.delimiterPrecedesEtAl, DEFAULT_DELIMITER_PRECEDES_ET_AL) ||
                !Objects.equals(nia.delimiterPrecedesLast, DEFAULT_DELIMITER_PRECEDES_LAST) ||
                !Objects.equals(nia.sortSeparator, DEFAULT_SORT_SEPARATOR) ||
                !Objects.equals(nia.etAlMin, DEFAULT_ET_AL_MIN) ||
                !Objects.equals(nia.etAlUseFirst, DEFAULT_ET_AL_USE_FIRST);
    }

    public String getAnd() {
        return and;
    }

    public String getDelimiterPrecedesEtAl() {
        return delimiterPrecedesEtAl;
    }

    public String getDelimiterPrecedesLast() {
        return delimiterPrecedesLast;
    }

    public boolean isInitialize() {
        return initialize;
    }

    public String getInitializeWith() {
        return initializeWith;
    }

    public String getNameAsSortOrder() {
        return nameAsSortOrder;
    }

    public String getSortSeparator() {
        return sortSeparator;
    }

    public Integer getEtAlMin() {
        return etAlMin;
    }

    public Integer getEtAlUseFirst() {
        return etAlUseFirst;
    }

    /**
     * Combines this object with {@code override}: for each attribute, the
     * override wins only if its value differs from the attribute's default.
     * Returns {@code this} unchanged when the override has nothing to add.
     * <p>
     * NOTE: because "set" is detected by comparison with the default, an
     * override that explicitly specifies the default value (e.g.
     * {@code initialize="true"}) cannot be distinguished from one that left
     * the attribute out, and therefore never overrides an inherited non-default.
     */
    public SNameInheritableAttributes merge(SNameInheritableAttributes override) {
        if (override == null || !override.hasInheritableAttributes) {
            return this;
        }

        String and = this.and;
        if (!Objects.equals(override.and, DEFAULT_AND)) {
            and = override.and;
        }
        boolean initialize = this.initialize;
        if (override.initialize != DEFAULT_INITIALIZE) {
            initialize = override.initialize;
        }
        String initializeWith = this.initializeWith;
        if (!Objects.equals(override.initializeWith, DEFAULT_INITIALIZE_WITH)) {
            initializeWith = override.initializeWith;
        }
        String nameAsSortOrder = this.nameAsSortOrder;
        if (!Objects.equals(override.nameAsSortOrder, DEFAULT_NAME_AS_SORT_ORDER)) {
            nameAsSortOrder = override.nameAsSortOrder;
        }
        String delimiterPrecedesEtAl = this.delimiterPrecedesEtAl;
        if (!Objects.equals(override.delimiterPrecedesEtAl, DEFAULT_DELIMITER_PRECEDES_ET_AL)) {
            delimiterPrecedesEtAl = override.delimiterPrecedesEtAl;
        }
        String delimiterPrecedesLast = this.delimiterPrecedesLast;
        if (!Objects.equals(override.delimiterPrecedesLast, DEFAULT_DELIMITER_PRECEDES_LAST)) {
            delimiterPrecedesLast = override.delimiterPrecedesLast;
        }
        String sortSeparator = this.sortSeparator;
        if (!Objects.equals(override.sortSeparator, DEFAULT_SORT_SEPARATOR)) {
            sortSeparator = override.sortSeparator;
        }
        Integer etAlMin = this.etAlMin;
        if (!Objects.equals(override.etAlMin, DEFAULT_ET_AL_MIN)) {
            etAlMin = override.etAlMin;
        }
        Integer etAlUseFirst = this.etAlUseFirst;
        if (!Objects.equals(override.etAlUseFirst, DEFAULT_ET_AL_USE_FIRST)) {
            etAlUseFirst = override.etAlUseFirst;
        }

        return new SNameInheritableAttributes(and, delimiterPrecedesEtAl,
                delimiterPrecedesLast, initialize, initializeWith,
                nameAsSortOrder, sortSeparator, etAlMin, etAlUseFirst);
    }

    /**
     * Runs the render function. If this object actually carries non-default
     * attributes, it is pushed into a child {@link RenderContext} so nested
     * name elements can inherit from it; otherwise the function is invoked on
     * the original context to avoid the extra allocation.
     */
    @Override
    public void accept(Consumer<RenderContext> renderFunction, RenderContext ctx) {
        if (hasInheritableAttributes) {
            RenderContext tmp = new RenderContext(ctx, this);
            renderFunction.accept(tmp);
            ctx.emit(tmp.getResult());
        } else {
            renderFunction.accept(ctx);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.mget;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.testframework.ESIntegTestCase;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.testframework.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for the multi-get (mget) API: per-item failure reporting,
 * alias resolution, routing, parent resolution, and source filtering.
 * Documents are indexed with {@code IMMEDIATE} refresh so they are visible
 * to the mget that follows without an explicit refresh call.
 */
public class SimpleMgetIT extends ESIntegTestCase {

    // A missing index must fail only the affected items, not the whole request.
    public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException {
        createIndex("test");

        client().prepareIndex("test", "test", "1").setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
            .setRefreshPolicy(IMMEDIATE).get();

        // One existing and one non-existing index in the same request:
        // item 0 succeeds, item 1 carries a per-item failure.
        MultiGetResponse mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item("test", "test", "1"))
            .add(new MultiGetRequest.Item("nonExistingIndex", "test", "1"))
            .get();
        assertThat(mgetResponse.getResponses().length, is(2));

        assertThat(mgetResponse.getResponses()[0].getIndex(), is("test"));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));

        assertThat(mgetResponse.getResponses()[1].getIndex(), is("nonExistingIndex"));
        assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));
        assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is("no such index"));
        assertThat(((ElasticsearchException) mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex().getName(),
            is("nonExistingIndex"));

        // Request with only the non-existing index still returns a response
        // (with a failure item) rather than throwing.
        mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item("nonExistingIndex", "test", "1"))
            .get();
        assertThat(mgetResponse.getResponses().length, is(1));
        assertThat(mgetResponse.getResponses()[0].getIndex(), is("nonExistingIndex"));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(true));
        assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), is("no such index"));
        assertThat(((ElasticsearchException) mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex().getName(),
            is("nonExistingIndex"));
    }

    // An alias pointing at multiple indices is ambiguous for a get by id and
    // must produce a per-item failure mentioning that.
    public void testThatMgetShouldWorkWithMultiIndexAlias() throws IOException {
        assertAcked(prepareCreate("test").addAlias(new Alias("multiIndexAlias")));
        assertAcked(prepareCreate("test2").addAlias(new Alias("multiIndexAlias")));

        client().prepareIndex("test", "test", "1").setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
            .setRefreshPolicy(IMMEDIATE).get();

        MultiGetResponse mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item("test", "test", "1"))
            .add(new MultiGetRequest.Item("multiIndexAlias", "test", "1"))
            .get();
        assertThat(mgetResponse.getResponses().length, is(2));

        assertThat(mgetResponse.getResponses()[0].getIndex(), is("test"));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));

        assertThat(mgetResponse.getResponses()[1].getIndex(), is("multiIndexAlias"));
        assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));
        assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), containsString("more than one indices"));

        mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item("multiIndexAlias", "test", "1"))
            .get();
        assertThat(mgetResponse.getResponses().length, is(1));
        assertThat(mgetResponse.getResponses()[0].getIndex(), is("multiIndexAlias"));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(true));
        assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), containsString("more than one indices"));
    }

    // An alias with a routing value satisfies a required _routing mapping,
    // so a get through the alias succeeds without an explicit routing.
    public void testThatMgetShouldWorkWithAliasRouting() throws IOException {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias1").routing("abc"))
            .addMapping("test", jsonBuilder()
                .startObject().startObject("test").startObject("_routing").field("required", true).endObject().endObject().endObject()));

        client().prepareIndex("alias1", "test", "1").setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
            .setRefreshPolicy(IMMEDIATE).get();

        MultiGetResponse mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item("alias1", "test", "1"))
            .get();
        assertEquals(1, mgetResponse.getResponses().length);

        // Response reports the concrete index the alias resolved to
        assertEquals("test", mgetResponse.getResponses()[0].getIndex());
        assertFalse(mgetResponse.getResponses()[0].isFailed());
    }

    // _parent routing can be given per mget item; omitting it for a
    // parent-mapped type fails that item with a routing-required error.
    public void testThatParentPerDocumentIsSupported() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
            .addMapping("test", jsonBuilder()
                .startObject()
                .startObject("test")
                .startObject("_parent")
                .field("type", "foo")
                .endObject()
                .endObject()
                .endObject()));

        client().prepareIndex("test", "test", "1").setParent("4").setRefreshPolicy(IMMEDIATE)
            .setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
            .get();

        MultiGetResponse mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "test", "1").parent("4"))
            .add(new MultiGetRequest.Item(indexOrAlias(), "test", "1"))
            .get();

        assertThat(mgetResponse.getResponses().length, is(2));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));
        assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true));

        assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));
        assertThat(mgetResponse.getResponses()[1].getResponse(), nullValue());
        assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[test]/[1]"));
    }

    // Per-item _source include/exclude filtering: even items request only the
    // "included" object minus its hidden field; odd items suppress _source.
    @SuppressWarnings("unchecked")
    public void testThatSourceFilteringIsSupported() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
        BytesReference sourceBytesRef = BytesReference.bytes(jsonBuilder().startObject()
            .array("field", "1", "2")
            .startObject("included").field("field", "should be seen").field("hidden_field", "should not be seen").endObject()
            .field("excluded", "should not be seen")
            .endObject());
        for (int i = 0; i < 100; i++) {
            client().prepareIndex("test", "type", Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get();
        }

        MultiGetRequestBuilder request = client().prepareMultiGet();
        for (int i = 0; i < 100; i++) {
            if (i % 2 == 0) {
                request.add(new MultiGetRequest.Item(indexOrAlias(), "type", Integer.toString(i))
                    .fetchSourceContext(new FetchSourceContext(true, new String[] {"included"}, new String[] {"*.hidden_field"})));
            } else {
                request.add(new MultiGetRequest.Item(indexOrAlias(), "type", Integer.toString(i))
                    .fetchSourceContext(new FetchSourceContext(false)));
            }
        }

        MultiGetResponse response = request.get();

        assertThat(response.getResponses().length, equalTo(100));
        for (int i = 0; i < 100; i++) {
            MultiGetItemResponse responseItem = response.getResponses()[i];
            assertThat(responseItem.getIndex(), equalTo("test"));
            if (i % 2 == 0) {
                // Filtered source keeps only included.field
                Map<String, Object> source = responseItem.getResponse().getSourceAsMap();
                assertThat(source.size(), equalTo(1));
                assertThat(source, hasKey("included"));
                assertThat(((Map<String, Object>) source.get("included")).size(), equalTo(1));
                assertThat(((Map<String, Object>) source.get("included")), hasKey("field"));
            } else {
                // Source fetch disabled entirely
                assertThat(responseItem.getResponse().getSourceAsBytes(), nullValue());
            }
        }
    }

    // Routing supplied per item: the doc is indexed onto a non-default shard,
    // so only the item that repeats that routing finds it.
    public void testThatRoutingPerDocumentIsSupported() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
            .setSettings(Settings.builder()
                .put(indexSettings())
                // need at least two shards so the routing values below differ
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS))));

        final String id = routingKeyForShard("test", 0);
        final String routingOtherShard = routingKeyForShard("test", 1);

        client().prepareIndex("test", "test", id).setRefreshPolicy(IMMEDIATE).setRouting(routingOtherShard)
            .setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
            .get();

        MultiGetResponse mgetResponse = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "test", id).routing(routingOtherShard))
            .add(new MultiGetRequest.Item(indexOrAlias(), "test", id))
            .get();

        assertThat(mgetResponse.getResponses().length, is(2));
        assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));
        assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true));
        assertThat(mgetResponse.getResponses()[0].getResponse().getIndex(), is("test"));

        // Without the routing the doc is looked up on the wrong shard: not
        // a failure, just "does not exist".
        assertThat(mgetResponse.getResponses()[1].isFailed(), is(false));
        assertThat(mgetResponse.getResponses()[1].getResponse().isExists(), is(false));
        assertThat(mgetResponse.getResponses()[1].getResponse().getIndex(), is("test"));
    }

    // Randomly address the index directly or through its alias so both
    // resolution paths are exercised across test runs.
    private static String indexOrAlias() {
        return randomBoolean() ? "test" : "alias";
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.analysis;

import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory;
import org.elasticsearch.test.ElasticsearchTestCase;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Alerts us if new analyzers are added to lucene, so we don't miss them.
 * <p>
 * If we don't want to expose one for a specific reason, just map it to Void.
 * Deprecated Lucene factories we deliberately ignore map to Deprecated.
 */
public class AnalysisFactoryTests extends ElasticsearchTestCase {

    /** Every Lucene tokenizer name we have accounted for, mapped to the ES factory exposing it (or a marker class). */
    static final Map<String, Class<?>> KNOWN_TOKENIZERS = new HashMap<String, Class<?>>();
    static {
        // deprecated ones, we dont care about these
        KNOWN_TOKENIZERS.put("arabicletter", Deprecated.class);
        KNOWN_TOKENIZERS.put("chinese", Deprecated.class);
        KNOWN_TOKENIZERS.put("cjk", Deprecated.class);
        KNOWN_TOKENIZERS.put("russianletter", Deprecated.class);

        // exposed in ES
        KNOWN_TOKENIZERS.put("edgengram", EdgeNGramTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("keyword", KeywordTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("letter", LetterTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("lowercase", LowerCaseTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("ngram", NGramTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("pathhierarchy", PathHierarchyTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("pattern", PatternTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("standard", StandardTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("uax29urlemail", UAX29URLEmailTokenizerFactory.class);
        KNOWN_TOKENIZERS.put("whitespace", WhitespaceTokenizerFactory.class);

        // TODO: these tokenizers are not yet exposed: useful?
        // historical version of standardtokenizer... tries to recognize
        // company names and a few other things. not good for asian languages etc.
        KNOWN_TOKENIZERS.put("classic", Void.class);
        // we should add this, the thaiwordfilter is deprecated. this one has correct offsets
        KNOWN_TOKENIZERS.put("thai", Void.class);
        // this one "seems to mess up offsets". probably shouldn't be a tokenizer...
        KNOWN_TOKENIZERS.put("wikipedia", Void.class);
    }

    public void testTokenizers() {
        assertAllFactoriesKnown("tokenizers", "KNOWN_TOKENIZERS",
                org.apache.lucene.analysis.util.TokenizerFactory.availableTokenizers(), KNOWN_TOKENIZERS);
    }

    /** Every Lucene token filter name we have accounted for, mapped to the ES factory exposing it (or a marker class). */
    static final Map<String, Class<?>> KNOWN_TOKENFILTERS = new HashMap<String, Class<?>>();
    static {
        // deprecated ones, we dont care about these
        KNOWN_TOKENFILTERS.put("chinese", Deprecated.class);
        KNOWN_TOKENFILTERS.put("collationkey", Deprecated.class);
        KNOWN_TOKENFILTERS.put("position", Deprecated.class);
        KNOWN_TOKENFILTERS.put("thaiword", Deprecated.class);

        // exposed in ES
        KNOWN_TOKENFILTERS.put("arabicnormalization", ArabicNormalizationFilterFactory.class);
        KNOWN_TOKENFILTERS.put("arabicstem", ArabicStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("asciifolding", ASCIIFoldingTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("bulgarianstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("cjkbigram", CJKBigramFilterFactory.class);
        KNOWN_TOKENFILTERS.put("cjkwidth", CJKWidthFilterFactory.class);
        KNOWN_TOKENFILTERS.put("commongrams", CommonGramsTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("commongramsquery", CommonGramsTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("czechstem", CzechStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("delimitedpayload", DelimitedPayloadTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("edgengram", EdgeNGramTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("elision", ElisionTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("englishminimalstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("englishpossessive", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("finnishlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("frenchlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("frenchminimalstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("germanstem", GermanStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("germanlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("germanminimalstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("greeklowercase", LowerCaseTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("greekstem", StemmerTokenFilterFactory.class);
        // FIX: the original listed "hindistem" twice; the duplicate put was a no-op and has been removed.
        KNOWN_TOKENFILTERS.put("hindistem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("hungarianlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("hunspellstem", HunspellTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("hyphenationcompoundword", HyphenationCompoundWordTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("indonesianstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("italianlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("keepword", KeepWordFilterFactory.class);
        KNOWN_TOKENFILTERS.put("keywordmarker", KeywordMarkerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("kstem", KStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("latvianstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("length", LengthTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("limittokencount", LimitTokenCountFilterFactory.class);
        KNOWN_TOKENFILTERS.put("lowercase", LowerCaseTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("ngram", NGramTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("norwegianminimalstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("patterncapturegroup", PatternCaptureGroupTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("patternreplace", PatternReplaceTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("persiannormalization", PersianNormalizationFilterFactory.class);
        KNOWN_TOKENFILTERS.put("porterstem", PorterStemTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("portugueselightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("portugueseminimalstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("reversestring", ReverseTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("russianlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("shingle", ShingleTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("snowballporter", SnowballTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("spanishlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("standard", StandardTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("stop", StopTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("swedishlightstem", StemmerTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("synonym", SynonymTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("trim", TrimTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("truncate", TruncateTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("turkishlowercase", LowerCaseTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("uppercase", UpperCaseTokenFilterFactory.class);
        KNOWN_TOKENFILTERS.put("worddelimiter", WordDelimiterTokenFilterFactory.class);

        // TODO: these tokenfilters are not yet exposed: useful?
        // useful for turkish language
        KNOWN_TOKENFILTERS.put("apostrophe", Void.class);
        // capitalizes tokens
        KNOWN_TOKENFILTERS.put("capitalization", Void.class);
        // cleans up after classic tokenizer
        KNOWN_TOKENFILTERS.put("classic", Void.class);
        // like length filter (but codepoints)
        KNOWN_TOKENFILTERS.put("codepointcount", Void.class);
        // galician language stemmers
        KNOWN_TOKENFILTERS.put("galicianminimalstem", Void.class);
        KNOWN_TOKENFILTERS.put("galicianstem", Void.class);
        // o+umlaut=oe type normalization for german
        KNOWN_TOKENFILTERS.put("germannormalization", Void.class);
        // hindi text normalization
        KNOWN_TOKENFILTERS.put("hindinormalization", Void.class);
        // puts hyphenated words back together
        KNOWN_TOKENFILTERS.put("hyphenatedwords", Void.class);
        // unicode normalization for indian languages
        KNOWN_TOKENFILTERS.put("indicnormalization", Void.class);
        // lowercasing for irish: add to LowerCase (has a stemmer, too)
        KNOWN_TOKENFILTERS.put("irishlowercase", Void.class);
        // repeats anything marked as keyword
        KNOWN_TOKENFILTERS.put("keywordrepeat", Void.class);
        // like limittokencount, but by position
        KNOWN_TOKENFILTERS.put("limittokenposition", Void.class);
        // ???
        KNOWN_TOKENFILTERS.put("numericpayload", Void.class);
        // RSLP stemmer for portuguese
        KNOWN_TOKENFILTERS.put("portuguesestem", Void.class);
        // light stemming for norwegian (has nb/nn options too)
        KNOWN_TOKENFILTERS.put("norwegianlightstem", Void.class);
        // removes duplicates at the same position (this should be used by the existing factory)
        KNOWN_TOKENFILTERS.put("removeduplicates", Void.class);
        // accent handling for scandinavian languages
        KNOWN_TOKENFILTERS.put("scandinavianfolding", Void.class);
        // less aggressive accent handling for scandinavian languages
        KNOWN_TOKENFILTERS.put("scandinaviannormalization", Void.class);
        // kurdish language support
        KNOWN_TOKENFILTERS.put("soraninormalization", Void.class);
        KNOWN_TOKENFILTERS.put("soranistem", Void.class);
        // ???
        KNOWN_TOKENFILTERS.put("tokenoffsetpayload", Void.class);
        // like a stop filter but by token-type
        KNOWN_TOKENFILTERS.put("type", Void.class);
        // puts the type into the payload
        KNOWN_TOKENFILTERS.put("typeaspayload", Void.class);
    }

    public void testTokenFilters() {
        assertAllFactoriesKnown("tokenfilters", "KNOWN_TOKENFILTERS",
                org.apache.lucene.analysis.util.TokenFilterFactory.availableTokenFilters(), KNOWN_TOKENFILTERS);
    }

    /** Every Lucene char filter name we have accounted for, mapped to the ES factory exposing it (or a marker class). */
    static final Map<String, Class<?>> KNOWN_CHARFILTERS = new HashMap<String, Class<?>>();
    static {
        // exposed in ES
        KNOWN_CHARFILTERS.put("htmlstrip", HtmlStripCharFilterFactory.class);
        KNOWN_CHARFILTERS.put("mapping", MappingCharFilterFactory.class);
        KNOWN_CHARFILTERS.put("patternreplace", PatternReplaceCharFilterFactory.class);

        // TODO: these charfilters are not yet exposed: useful?
        // handling of zwnj for persian
        KNOWN_CHARFILTERS.put("persian", Void.class);
    }

    public void testCharFilters() {
        assertAllFactoriesKnown("charfilters", "KNOWN_CHARFILTERS",
                org.apache.lucene.analysis.util.CharFilterFactory.availableCharFilters(), KNOWN_CHARFILTERS);
    }

    /**
     * Fails if Lucene advertises a factory name that the given known-map does
     * not account for, listing the missing names sorted for readability.
     *
     * @param kind      human-readable plural of the factory kind (for the message)
     * @param mapName   the name of the constant to update (for the message)
     * @param available factory names Lucene currently provides
     * @param known     names this test has accounted for
     */
    private static void assertAllFactoriesKnown(String kind, String mapName,
            Set<String> available, Map<String, Class<?>> known) {
        Set<String> missing = new TreeSet<String>(available);
        missing.removeAll(known.keySet());
        assertTrue("new " + kind + " found, please update " + mapName + ": " + missing.toString(), missing.isEmpty());
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jdbi.v3.core.result;

import java.lang.reflect.Type;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Stream;

import org.jdbi.v3.core.collector.ElementTypeNotFoundException;
import org.jdbi.v3.core.collector.NoSuchCollectorException;
import org.jdbi.v3.core.config.Configurable;
import org.jdbi.v3.core.generic.GenericType;
import org.jdbi.v3.core.mapper.ColumnMapper;
import org.jdbi.v3.core.mapper.GenericMapMapperFactory;
import org.jdbi.v3.core.mapper.MapMapper;
import org.jdbi.v3.core.mapper.NoSuchMapperException;
import org.jdbi.v3.core.mapper.RowMapper;
import org.jdbi.v3.core.mapper.RowViewMapper;
import org.jdbi.v3.core.mapper.SingleColumnMapper;
import org.jdbi.v3.core.mapper.reflect.BeanMapper;
import org.jdbi.v3.core.qualifier.QualifiedType;
import org.jdbi.v3.core.result.internal.RowViewImpl;
import org.jdbi.v3.core.statement.StatementContext;

/**
 * Provides access to the contents of a {@link ResultSet} by mapping to Java types.
 *
 * <p>All {@code default} methods ultimately funnel through
 * {@link #scanResultSet(ResultSetScanner)}, which is the single abstract method:
 * an implementation only has to supply that one entry point.</p>
 */
public interface ResultBearing {
    /**
     * Returns a ResultBearing backed by the given result set supplier and context.
     *
     * @param resultSetSupplier result set supplier
     * @param ctx the statement context
     * @return a ResultBearing
     */
    static ResultBearing of(Supplier<ResultSet> resultSetSupplier, StatementContext ctx) {
        return new ResultBearing() {
            @Override
            public <R> R scanResultSet(ResultSetScanner<R> mapper) {
                try {
                    return mapper.scanResultSet(resultSetSupplier, ctx);
                } catch (SQLException e) {
                    // Wrap the checked SQLException so callers of the functional API
                    // are not forced to declare it.
                    throw new ResultSetException("Error reading result set", e, ctx);
                }
            }
        };
    }

    /**
     * Invokes the mapper with a result set supplier, and returns the value returned by the mapper.
     *
     * @param mapper result set scanner
     * @param <R> result type returned by the mapper.
     * @return the value returned by the mapper.
     */
    <R> R scanResultSet(ResultSetScanner<R> mapper);

    /**
     * Maps this result set to a {@link ResultIterable} of the given element type.
     *
     * @param type the type to map the result set rows to
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of the given type.
     * @see Configurable#registerRowMapper(RowMapper)
     * @see Configurable#registerRowMapper(org.jdbi.v3.core.mapper.RowMapperFactory)
     * @see Configurable#registerColumnMapper(org.jdbi.v3.core.mapper.ColumnMapperFactory)
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default <T> ResultIterable<T> mapTo(Class<T> type) {
        // Delegates to the QualifiedType overload, which performs the actual mapper lookup.
        return mapTo(QualifiedType.of(type));
    }

    /**
     * Maps this result set to a {@link ResultIterable} of the given element type.
     *
     * @param type the type to map the result set rows to
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of the given type.
     * @see Configurable#registerRowMapper(RowMapper)
     * @see Configurable#registerRowMapper(org.jdbi.v3.core.mapper.RowMapperFactory)
     * @see Configurable#registerColumnMapper(org.jdbi.v3.core.mapper.ColumnMapperFactory)
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default <T> ResultIterable<T> mapTo(GenericType<T> type) {
        return mapTo(QualifiedType.of(type));
    }

    /**
     * Maps this result set to a {@link ResultIterable} of the given element type.
     *
     * <p>Since a raw {@link Type} carries no compile-time element information,
     * the returned iterable is wildcard-typed.</p>
     *
     * @param type the type to map the result set rows to
     * @return a {@link ResultIterable} of the given type.
     * @see Configurable#registerRowMapper(RowMapper)
     * @see Configurable#registerRowMapper(org.jdbi.v3.core.mapper.RowMapperFactory)
     * @see Configurable#registerColumnMapper(org.jdbi.v3.core.mapper.ColumnMapperFactory)
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default ResultIterable<?> mapTo(Type type) {
        return mapTo(QualifiedType.of(type));
    }

    /**
     * Maps this result set to a {@link ResultIterable} of the given qualified element type.
     *
     * @param type the qualified type to map the result set rows to
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of the given type.
     * @throws NoSuchMapperException if no row or column mapper is registered for the type
     * @see Configurable#registerRowMapper(RowMapper)
     * @see Configurable#registerRowMapper(org.jdbi.v3.core.mapper.RowMapperFactory)
     * @see Configurable#registerColumnMapper(org.jdbi.v3.core.mapper.ColumnMapperFactory)
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default <T> ResultIterable<T> mapTo(QualifiedType<T> type) {
        return scanResultSet((supplier, ctx) -> {
            RowMapper<T> mapper = ctx.findMapperFor(type)
                .orElseThrow(() -> new NoSuchMapperException("No mapper registered for type " + type));
            return ResultIterable.of(supplier, mapper, ctx);
        });
    }

    /**
     * Maps this result set to a {@link ResultIterable} of the given element type, using {@link BeanMapper}.
     *
     * @param type the bean type to map the result set rows to
     * @param <T> the bean type to map the result set rows to
     * @return a {@link ResultIterable} of the given type.
     */
    @SuppressWarnings("deprecation")
    default <T> ResultIterable<T> mapToBean(Class<T> type) {
        return map(BeanMapper.of(type));
    }

    /**
     * Maps this result set to a {@link ResultIterable} of {@code Map<String,Object>}. Keys are column names, and
     * values are column values.
     *
     * @return a {@link ResultIterable ResultIterable&lt;Map&lt;String,Object&gt;&gt;}.
     */
    default ResultIterable<Map<String, Object>> mapToMap() {
        return map(new MapMapper());
    }

    /**
     * Maps this result set to a {@link Map} of {@link String} and the given value class.
     *
     * @param <T> the value type
     * @param valueType the class to map the resultset columns to
     * @return a {@link Map} of String and the given type.
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default <T> ResultIterable<Map<String, T>> mapToMap(Class<T> valueType) {
        return scanResultSet((supplier, ctx) ->
            ResultIterable.of(supplier, GenericMapMapperFactory.getMapperForValueType(valueType, ctx.getConfig()), ctx));
    }

    /**
     * Maps this result set to a {@link Map} of {@link String} and the given value type.
     *
     * @param <T> the value type
     * @param valueType the type to map the resultset columns to
     * @return a {@link Map} of String and the given type.
     * @see Configurable#registerColumnMapper(ColumnMapper)
     */
    default <T> ResultIterable<Map<String, T>> mapToMap(GenericType<T> valueType) {
        return scanResultSet((supplier, ctx) ->
            ResultIterable.of(supplier, GenericMapMapperFactory.getMapperForValueType(valueType, ctx.getConfig()), ctx));
    }

    /**
     * Maps this result set to a {@link ResultIterable}, using the given column mapper.
     *
     * @param mapper column mapper used to map the first column of each row
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of type {@code <T>}.
     */
    default <T> ResultIterable<T> map(ColumnMapper<T> mapper) {
        // SingleColumnMapper adapts the column mapper into a row mapper over column 1.
        return map(new SingleColumnMapper<>(mapper));
    }

    /**
     * Maps this result set to a {@link ResultIterable}, using the given row mapper.
     *
     * @param mapper mapper used to map each row
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of type {@code <T>}.
     */
    default <T> ResultIterable<T> map(RowMapper<T> mapper) {
        return scanResultSet((supplier, ctx) -> ResultIterable.of(supplier, mapper, ctx));
    }

    /**
     * Maps this result set to a {@link ResultIterable}, using the given {@link RowViewMapper}.
     * This overload only exists to allow RowViewMapper as the type of a lambda expression.
     *
     * @param mapper RowViewMapper used to map each row
     * @param <T> the type to map the result set rows to
     * @return a {@link ResultIterable} of type {@code <T>}.
     */
    default <T> ResultIterable<T> map(RowViewMapper<T> mapper) {
        return map((RowMapper<T>) mapper);
    }

    /**
     * Reduce the result rows using the given row reducer.
     *
     * @param reducer the row reducer.
     * @param <C> Mutable result container type
     * @param <R> Result element type
     * @return the stream of result elements
     * @see RowReducer
     */
    default <C, R> Stream<R> reduceRows(RowReducer<C, R> reducer) {
        return scanResultSet((supplier, ctx) -> {
            try (ResultSet rs = supplier.get()) {
                // A single RowView is reused for every row; it reads the cursor's
                // current position, so it must only be consumed inside the loop.
                RowView rowView = new RowViewImpl(rs, ctx);
                C container = reducer.container();
                while (rs.next()) {
                    reducer.accumulate(container, rowView);
                }
                // The stream is built from the fully-populated container, so it is
                // safe to return after the ResultSet is closed.
                return reducer.stream(container);
            } catch (SQLException e) {
                throw new UnableToProduceResultException(e, ctx);
            } finally {
                ctx.close();
            }
        });
    }

    /**
     * Reduce the result rows using a {@link Map Map&lt;K, V&gt;} as the
     * result container.
     *
     * @param accumulator accumulator function which gathers data from each
     *                    {@link RowView} into the result map.
     * @param <K> map key type
     * @param <V> map value type
     * @return the stream of elements in the container's {@link Map#values()}
     * collection, in the order they were inserted.
     */
    default <K, V> Stream<V> reduceRows(BiConsumer<Map<K, V>, RowView> accumulator) {
        // The cast to LinkedHashMapRowReducer fixes the container type to an
        // insertion-ordered map while reusing the RowReducer overload above.
        return reduceRows((LinkedHashMapRowReducer<K, V>) accumulator::accept);
    }

    /**
     * Reduce the results. Using a {@code BiFunction<U, RowView, U>}, repeatedly
     * combine query results until only a single value remains.
     *
     * @param <U> the type of the accumulator
     * @param seed the {@code U} to combine with the first result
     * @param accumulator the function to apply repeatedly
     * @return the final {@code U}
     */
    default <U> U reduceRows(U seed, BiFunction<U, RowView, U> accumulator) {
        return scanResultSet((supplier, ctx) -> {
            try (ResultSet rs = supplier.get()) {
                RowView rv = new RowViewImpl(rs, ctx);
                U result = seed;
                while (rs.next()) {
                    result = accumulator.apply(result, rv);
                }
                return result;
            } catch (SQLException e) {
                throw new UnableToProduceResultException(e, ctx);
            } finally {
                ctx.close();
            }
        });
    }

    /**
     * Reduce the results. Using a {@code ResultSetAccumulator}, repeatedly
     * combine query results until only a single value remains.
     *
     * @param <U> the accumulator type
     * @param seed the {@code U} to combine with the first result
     * @param accumulator the function to apply repeatedly
     * @return the final {@code U}
     */
    default <U> U reduceResultSet(U seed, ResultSetAccumulator<U> accumulator) {
        return scanResultSet((supplier, ctx) -> {
            try (ResultSet rs = supplier.get()) {
                U result = seed;
                while (rs.next()) {
                    result = accumulator.apply(result, rs, ctx);
                }
                return result;
            } catch (SQLException e) {
                throw new UnableToProduceResultException(e, ctx);
            } finally {
                ctx.close();
            }
        });
    }

    /**
     * Collect the results using the given collector. Do not attempt to accumulate the
     * {@link RowView} objects into the result--they are only valid within the
     * {@link Collector#accumulator()} function. Instead, extract mapped types from the
     * RowView by calling {@code RowView.getRow()} or {@code RowView.getColumn()}.
     *
     * @param collector the collector to collect the result rows.
     * @param <A> the mutable accumulator type used by the collector.
     * @param <R> the result type returned by the collector.
     * @return the result of the collection
     */
    default <A, R> R collectRows(Collector<RowView, A, R> collector) {
        return scanResultSet((supplier, ctx) -> {
            try (ResultSet rs = supplier.get()) {
                RowView rv = new RowViewImpl(rs, ctx);
                A acc = collector.supplier().get();
                BiConsumer<A, RowView> consumer = collector.accumulator();
                while (rs.next()) {
                    consumer.accept(acc, rv);
                }
                return collector.finisher().apply(acc);
            } catch (SQLException e) {
                throw new UnableToProduceResultException(e, ctx);
            } finally {
                ctx.close();
            }
        });
    }

    /**
     * Collect the results into a container of the given type. A collector
     * must be registered for the container type, which knows the element type
     * for the container. A mapper must be registered for the element type.
     * <p>
     * This method is equivalent to {@code ResultBearing.mapTo(elementType).collect(containerCollector)}.
     * </p>
     * @param containerType the container type into which results will be collected
     * @param <R> the result container type
     * @return a container into which result rows have been collected
     */
    @SuppressWarnings("unchecked")
    default <R> R collectInto(Class<R> containerType) {
        return (R) collectInto((Type) containerType);
    }

    /**
     * Collect the results into a container of the given generic type. A collector
     * must be registered for the container type, which knows the element type
     * for the container. A mapper must be registered for the element type.
     * <p>
     * This method is equivalent to {@code ResultBearing.mapTo(elementType).collect(containerCollector)}.
     * </p>
     * <p>
     * Example:
     * </p>
     * <pre>
     * Map&lt;Long, User&gt; usersById = handle.createQuery("select * from user")
     *     .configure(MapEntryMappers.class, cfg -&gt; cfg.setKeyColumn("id"))
     *     .collectInto(new GenericType&lt;Map&lt;Long, User&gt;&gt;() {});
     * </pre>
     *
     * @param containerType the container type into which results will be collected
     * @param <R> the result container type
     * @return a container into which result rows have been collected
     */
    @SuppressWarnings("unchecked")
    default <R> R collectInto(GenericType<R> containerType) {
        return (R) collectInto(containerType.getType());
    }

    /**
     * Collect the results into a container of the given type. A collector
     * must be registered for the container type, which knows the element type
     * for the container. A mapper must be registered for the element type.
     * <p>
     * This method is equivalent to {@code ResultBearing.mapTo(elementType).collect(containerCollector)}.
     * </p>
     *
     * @param containerType the container type into which results will be collected
     * @return a container into which result rows have been collected
     * @throws NoSuchCollectorException if no collector is registered for the container type
     * @throws ElementTypeNotFoundException if the element type of the container cannot be determined
     * @throws NoSuchMapperException if no mapper is registered for the element type
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    default Object collectInto(Type containerType) {
        return scanResultSet((rs, ctx) -> {
            Collector collector = ctx.findCollectorFor(containerType)
                .orElseThrow(() -> new NoSuchCollectorException("No collector registered for container type " + containerType));
            Type elementType = ctx.findElementTypeFor(containerType)
                .orElseThrow(() -> new ElementTypeNotFoundException("Unknown element type for container type " + containerType));
            RowMapper<?> mapper = ctx.findMapperFor(elementType)
                .orElseThrow(() -> new NoSuchMapperException("No mapper registered for element type " + elementType));
            return ResultIterable.of(rs, mapper, ctx).collect(collector);
        });
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.filter;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.js.JavaScriptConfig;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.extraction.JavaScriptExtractionFn;
import org.apache.druid.query.extraction.MapLookupExtractor;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.InDimFilter;
import org.apache.druid.query.lookup.LookupExtractionFn;
import org.apache.druid.query.lookup.LookupExtractor;
import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.StorageAdapter;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.Closeable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Parameterized tests for {@code InFilter}/{@link InDimFilter} against the fixture
 * rows below. Expected match lists are expressed as the matching rows' "dim0" values.
 *
 * <p>Many expectations branch on {@link NullHandling#replaceWithDefault()}: in
 * default-value (replace-with-default) mode, empty string and null are treated as
 * equivalent; in SQL-compatible mode they are distinct.</p>
 */
@RunWith(Parameterized.class)
public class InFilterTest extends BaseFilterTest
{
  private static final String TIMESTAMP_COLUMN = "timestamp";

  private static final InputRowParser<Map<String, Object>> PARSER = new MapInputRowParser(
      new TimeAndDimsParseSpec(
          new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")),
          new DimensionsSpec(null, null, null)
      )
  );

  // Fixture rows. dim0 is a unique key, dim1 is single-valued with an empty string,
  // dim2 is multi-valued (including empty-list and list-of-empty-string), and dim3
  // is intentionally absent so "missing column" behavior can be tested.
  private static final List<InputRow> ROWS = ImmutableList.of(
      PARSER.parseBatch(ImmutableMap.of("dim0", "a", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0),
      PARSER.parseBatch(ImmutableMap.of("dim0", "b", "dim1", "10", "dim2", ImmutableList.of())).get(0),
      PARSER.parseBatch(ImmutableMap.of("dim0", "c", "dim1", "2", "dim2", ImmutableList.of(""))).get(0),
      PARSER.parseBatch(ImmutableMap.of("dim0", "d", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0),
      PARSER.parseBatch(ImmutableMap.of("dim0", "e", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0),
      PARSER.parseBatch(ImmutableMap.of("dim0", "f", "dim1", "abc")).get(0)
  );

  public InFilterTest(
      String testName,
      IndexBuilder indexBuilder,
      Function<IndexBuilder, Pair<StorageAdapter, Closeable>> finisher,
      boolean cnf,
      boolean optimize
  )
  {
    super(testName, ROWS, indexBuilder, finisher, cnf, optimize);
  }

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @AfterClass
  public static void tearDown() throws Exception
  {
    BaseFilterTest.tearDown(InFilterTest.class.getName());
  }

  @Test
  public void testSingleValueStringColumnWithoutNulls()
  {
    assertFilterMatches(
        toInFilter("dim0"),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilter("dim0", null),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilter("dim0", "", ""),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilter("dim0", "a", "c"),
        ImmutableList.of("a", "c")
    );

    assertFilterMatches(
        toInFilter("dim0", "e", "x"),
        ImmutableList.of("e")
    );
  }

  @Test
  public void testSingleValueStringColumnWithNulls()
  {
    assertFilterMatches(
        toInFilter("dim1", null, ""),
        ImmutableList.of("a")
    );

    assertFilterMatches(
        toInFilter("dim1", ""),
        ImmutableList.of("a")
    );

    if (NullHandling.replaceWithDefault()) {
      // Row "a" has dim1 = "" which matches null in default-value mode.
      assertFilterMatches(
          toInFilter("dim1", null, "10", "abc"),
          ImmutableList.of("a", "b", "f")
      );
    } else {
      assertFilterMatches(
          toInFilter("dim1", null, "10", "abc"),
          ImmutableList.of("b", "f")
      );
    }

    assertFilterMatches(
        toInFilter("dim1", "-1", "ab", "de"),
        ImmutableList.of()
    );
  }

  @Test
  public void testMultiValueStringColumn()
  {
    if (NullHandling.replaceWithDefault()) {
      assertFilterMatches(
          toInFilter("dim2", "b", "d"),
          ImmutableList.of("a")
      );

      assertFilterMatches(
          toInFilter("dim2", null),
          ImmutableList.of("b", "c", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", null, "a"),
          ImmutableList.of("a", "b", "c", "d", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", null, "b"),
          ImmutableList.of("a", "b", "c", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", ""),
          ImmutableList.of("b", "c", "f")
      );
    } else {
      assertFilterMatches(
          toInFilter("dim2", null),
          ImmutableList.of("b", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", null, "a"),
          ImmutableList.of("a", "b", "d", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", null, "b"),
          ImmutableList.of("a", "b", "f")
      );

      assertFilterMatches(
          toInFilter("dim2", ""),
          ImmutableList.of("c")
      );
    }

    assertFilterMatches(
        toInFilter("dim2", "", (String) null),
        ImmutableList.of("b", "c", "f")
    );

    assertFilterMatches(
        toInFilter("dim2", "c"),
        ImmutableList.of("e")
    );

    assertFilterMatches(
        toInFilter("dim2", "d"),
        ImmutableList.of()
    );
  }

  @Test
  public void testMissingColumn()
  {
    // dim3 does not exist in any row, so a null/empty match hits every row.
    assertFilterMatches(
        toInFilter("dim3", null, (String) null),
        ImmutableList.of("a", "b", "c", "d", "e", "f")
    );

    if (NullHandling.replaceWithDefault()) {
      assertFilterMatches(
          toInFilter("dim3", ""),
          ImmutableList.of("a", "b", "c", "d", "e", "f")
      );
    } else {
      assertFilterMatches(
          toInFilter("dim3", ""),
          ImmutableList.of()
      );
    }

    assertFilterMatches(
        toInFilter("dim3", null, "a"),
        ImmutableList.of("a", "b", "c", "d", "e", "f")
    );

    assertFilterMatches(
        toInFilter("dim3", "a"),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilter("dim3", "b"),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilter("dim3", "c"),
        ImmutableList.of()
    );
  }

  @Test
  public void testMatchWithExtractionFn()
  {
    // Prefixes every (stringified) value with "super-"; nulls become "super-null".
    String extractionJsFn = "function(str) { return 'super-' + str; }";
    ExtractionFn superFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());

    // Maps null to "YES" and everything else to "NO".
    String nullJsFn = "function(str) { if (str === null) { return 'YES'; } else { return 'NO';} }";
    ExtractionFn yesNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());

    if (NullHandling.replaceWithDefault()) {
      assertFilterMatches(
          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
          ImmutableList.of("a", "b", "c", "d", "f")
      );

      assertFilterMatches(
          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
          ImmutableList.of("a", "b", "e")
      );

      assertFilterMatches(
          toInFilterWithFn("dim2", yesNullFn, "YES"),
          ImmutableList.of("b", "c", "f")
      );

      assertFilterMatches(
          toInFilterWithFn("dim1", yesNullFn, "NO"),
          ImmutableList.of("b", "c", "d", "e", "f")
      );
    } else {
      assertFilterMatches(
          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
          ImmutableList.of("a", "b", "d", "f")
      );

      assertFilterMatches(
          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
          ImmutableList.of("b", "e")
      );

      assertFilterMatches(
          toInFilterWithFn("dim2", yesNullFn, "YES"),
          ImmutableList.of("b", "f")
      );

      assertFilterMatches(
          toInFilterWithFn("dim1", yesNullFn, "NO"),
          ImmutableList.of("a", "b", "c", "d", "e", "f")
      );
    }

    assertFilterMatches(
        toInFilterWithFn("dim3", yesNullFn, "NO"),
        ImmutableList.of()
    );

    assertFilterMatches(
        toInFilterWithFn("dim3", yesNullFn, "YES"),
        ImmutableList.of("a", "b", "c", "d", "e", "f")
    );
  }

  @Test
  public void testMatchWithLookupExtractionFn()
  {
    final Map<String, String> stringMap = ImmutableMap.of(
        "a", "HELLO",
        "10", "HELLO",
        "def", "HELLO",
        "c", "BYE"
    );
    LookupExtractor mapExtractor = new MapLookupExtractor(stringMap, false);
    // Unmapped values are replaced with "UNKNOWN" (replaceMissingValueWith).
    LookupExtractionFn lookupFn = new LookupExtractionFn(mapExtractor, false, "UNKNOWN", false, true);

    assertFilterMatches(toInFilterWithFn("dim0", lookupFn, null, "HELLO"), ImmutableList.of("a"));
    assertFilterMatches(toInFilterWithFn("dim0", lookupFn, "HELLO", "BYE"), ImmutableList.of("a", "c"));
    assertFilterMatches(toInFilterWithFn("dim0", lookupFn, "UNKNOWN"), ImmutableList.of("b", "d", "e", "f"));
    assertFilterMatches(toInFilterWithFn("dim1", lookupFn, "HELLO"), ImmutableList.of("b", "e"));
    assertFilterMatches(toInFilterWithFn("dim1", lookupFn, "N/A"), ImmutableList.of());
    assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "a"), ImmutableList.of());
    assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "HELLO"), ImmutableList.of("a", "d"));
    assertFilterMatches(
        toInFilterWithFn("dim2", lookupFn, "HELLO", "BYE", "UNKNOWN"),
        ImmutableList.of("a", "b", "c", "d", "e", "f")
    );

    final Map<String, String> stringMap2 = ImmutableMap.of(
        "a", "e"
    );
    LookupExtractor mapExtractor2 = new MapLookupExtractor(stringMap2, false);
    // retainMissingValue = true: unmapped inputs pass through unchanged.
    LookupExtractionFn lookupFn2 = new LookupExtractionFn(mapExtractor2, true, null, false, true);

    assertFilterMatches(toInFilterWithFn("dim0", lookupFn2, null, "e"), ImmutableList.of("a", "e"));
    assertFilterMatches(toInFilterWithFn("dim0", lookupFn2, "a"), ImmutableList.of());

    final Map<String, String> stringMap3 = ImmutableMap.of(
        "c", "500",
        "100", "e"
    );
    LookupExtractor mapExtractor3 = new MapLookupExtractor(stringMap3, false);
    // Neither retain nor replace: unmapped inputs become null.
    LookupExtractionFn lookupFn3 = new LookupExtractionFn(mapExtractor3, false, null, false, true);

    assertFilterMatches(toInFilterWithFn("dim0", lookupFn3, null, "c"), ImmutableList.of("a", "b", "d", "e", "f"));
    assertFilterMatches(toInFilterWithFn("dim0", lookupFn3, "e"), ImmutableList.of());
  }

  @Test
  public void testRequiredColumnRewrite()
  {
    InFilter filter = (InFilter) toInFilter("dim0", "a", "c").toFilter();
    InFilter filter2 = (InFilter) toInFilter("dim1", "a", "c").toFilter();

    Assert.assertTrue(filter.supportsRequiredColumnRewrite());
    Assert.assertTrue(filter2.supportsRequiredColumnRewrite());

    Filter rewrittenFilter = filter.rewriteRequiredColumns(ImmutableMap.of("dim0", "dim1"));
    Assert.assertEquals(filter2, rewrittenFilter);

    // A rewrite map that does not mention the filter's dimension must be rejected.
    expectedException.expect(IAE.class);
    expectedException.expectMessage("Received a non-applicable rewrite: {invalidName=dim1}, filter's dimension: dim0");
    filter.rewriteRequiredColumns(ImmutableMap.of("invalidName", "dim1"));
  }

  @Test
  public void test_equals()
  {
    // Lazily-built predicate suppliers are excluded from equals/hashCode.
    EqualsVerifier.forClass(InFilter.class)
                  .usingGetClass()
                  .withNonnullFields("dimension", "values")
                  .withIgnoredFields("longPredicateSupplier", "floatPredicateSupplier", "doublePredicateSupplier")
                  .verify();
  }

  @Test
  public void test_equals_forInFilterDruidPredicateFactory()
  {
    EqualsVerifier.forClass(InFilter.InFilterDruidPredicateFactory.class)
                  .usingGetClass()
                  .withNonnullFields("values")
                  .withIgnoredFields("longPredicateSupplier", "floatPredicateSupplier", "doublePredicateSupplier")
                  .verify();
  }

  // Builds an IN filter with an empty value list.
  private DimFilter toInFilter(String dim)
  {
    List<String> emptyList = new ArrayList<>();
    return new InDimFilter(dim, emptyList, null);
  }

  // Builds an IN filter over the given values (no extraction function).
  private DimFilter toInFilter(String dim, String value, String... values)
  {
    return new InDimFilter(dim, Lists.asList(value, values), null);
  }

  // Builds an IN filter whose dimension values pass through the extraction fn first.
  private DimFilter toInFilterWithFn(String dim, ExtractionFn fn, String value, String... values)
  {
    return new InDimFilter(dim, Lists.asList(value, values), fn);
  }
}
package com.biddster.betterfood;

import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;

import static com.biddster.betterfood.Logger.logException;

/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 *
 * <p>Each drawer entry is a (title, URL) pair; selecting one forwards the URL to the
 * host activity via {@link NavigationDrawerCallbacks#loadUrl(String)}.</p>
 */
@SuppressWarnings("unchecked")
public class NavigationDrawerFragment extends Fragment {

    // Key under which the selected position is saved across configuration changes.
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
    // Preference flag recording that the user has opened the drawer at least once,
    // so we stop auto-opening it on launch.
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    // Host activity, cast in onAttach; null after onDetach.
    private NavigationDrawerCallbacks mCallbacks;
    // Ties drawer state to the action-bar home icon.
    private ActionBarDrawerToggle mDrawerToggle;
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;
    private int mCurrentSelectedPosition = 0;
    private boolean mFromSavedInstanceState;
    private boolean mUserLearnedDrawer;

    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        final SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        // NOTE(review): this runs before onCreateView, so selectItem only fires the
        // callback here (mDrawerListView/mDrawerLayout are still null at this point).
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(final Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
                             final Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(final AdapterView<?> parent, final View view, final int position, final long id) {
                selectItem(position);
            }
        });
        // Static list of drawer entries: display title paired with the URL to load.
        mDrawerListView.setAdapter(new PairArrayAdapter(getActionBar().getThemedContext(),
                new Pair<>(getString(R.string.action_home), "http://www.bbcgoodfood.com/"),
                new Pair<>(getString(R.string.title_section1), "http://www.bbcgoodfood.com/recipes/category/healthy"),
                new Pair<>(getString(R.string.title_section2), "http://www.bbcgoodfood.com/feature/family-and-kids"),
                new Pair<>(getString(R.string.title_section3), "http://www.bbcgoodfood.com/recipes/category/cakes-baking"),
                new Pair<>(getString(R.string.title_section4), "http://www.bbcgoodfood.com/recipes/category/cuisines"),
                new Pair<>(getString(R.string.title_section5), "http://www.bbcgoodfood.com/recipes/category/dishes"),
                new Pair<>(getString(R.string.title_section6), "http://www.bbcgoodfood.com/recipes/category/events"),
                new Pair<>(getString(R.string.title_section7), "http://www.bbcgoodfood.com/recipes/category/everyday"),
                new Pair<>(getString(R.string.title_section8), "http://www.bbcgoodfood.com/recipes/category/ingredients")));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    /** @return true when the drawer layout exists and the drawer is currently open. */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(final int fragmentId, final DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener

        final ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(final View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(final View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }
                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    final SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /**
     * Marks the item checked, forwards its URL to the host activity, and closes the drawer.
     * Safe to call before the view or drawer layout exist (each step is null-guarded).
     */
    private void selectItem(final int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
            if (mCallbacks != null) {
                final Pair<String, String> item =
                        ((PairArrayAdapter) mDrawerListView.getAdapter()).getItem(position);
                mCallbacks.loadUrl(item.second);
            }
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
    }

    @Override
    public void onAttach(final Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (final ClassCastException e) {
            // Logged with the original stack before rethrowing a clearer message.
            logException(e);
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(final Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(final Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(final Menu menu, final MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(final MenuItem item) {
        // Give the drawer toggle first chance to consume home/up taps.
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        final ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return (getActivity()).getActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         *
         * @param url the URL associated with the selected drawer entry
         */
        void loadUrl(final String url);
    }

    /** Closes the drawer if it is currently attached to a layout. */
    public void hide() {
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
    }

    /** Clears the checked state of every drawer entry. */
    public void clearDrawer() {
        mDrawerListView.clearChoices();
    }

    /**
     * Adapter that renders the first element (title) of each (title, URL) pair.
     */
    private static class PairArrayAdapter extends ArrayAdapter<Pair<String, String>> {

        public PairArrayAdapter(final Context context, final Pair<String, String>... pairs) {
            super(context, android.R.layout.simple_list_item_activated_1, pairs);
        }

        @Override
        public View getView(final int position, View convertView, final ViewGroup parent) {
            final Pair<String, String> object = super.getItem(position);
            if (convertView == null) {
                final int layoutResource = android.R.layout.simple_list_item_1;
                convertView = LayoutInflater.from(getContext()).inflate(layoutResource, null);
            }
            final TextView tv = (TextView) convertView.findViewById(android.R.id.text1);
            tv.setText(object.first);
            return convertView;
        }
    }
}
/** * Original iOS version by Jens Alfke * Ported to Android by Marty Schoch * * Copyright (c) 2012 Couchbase, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.couchbase.lite; import com.couchbase.lite.internal.AttachmentInternal; import com.couchbase.lite.internal.InterfaceAudience; import com.couchbase.lite.util.Log; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.zip.GZIPInputStream; /** * A Couchbase Lite Document Attachment. */ public final class Attachment { /** * The owning document revision. */ private Revision revision; /** * Whether or not this attachment is gzipped */ private boolean gzipped; /** * The filename. */ private String name; /** * The CouchbaseLite metadata about the attachment, that lives in the document. */ private Map<String, Object> metadata; /** * The body data. 
*/ private InputStream body; /** * Constructor */ @InterfaceAudience.Private /* package */ Attachment(InputStream contentStream, String contentType) { this.body = contentStream; metadata = new HashMap<String, Object>(); metadata.put("content_type", contentType); metadata.put("follows", true); this.gzipped = false; } /** * Constructor */ @InterfaceAudience.Private /* package */ Attachment(Revision revision, String name, Map<String, Object> metadata) { this.revision = revision; this.name = name; this.metadata = metadata; this.gzipped = false; } /** * Get the owning document revision. */ @InterfaceAudience.Public public Revision getRevision() { return revision; } /** * Get the owning document. */ @InterfaceAudience.Public public Document getDocument() { return revision.getDocument(); } /** * Get the filename. */ @InterfaceAudience.Public public String getName() { return name; } /** * Get the MIME type of the contents. */ @InterfaceAudience.Public public String getContentType() { return (String) metadata.get("content_type"); } /** * Get the content (aka 'body') data. * @throws CouchbaseLiteException */ @InterfaceAudience.Public public InputStream getContent() throws CouchbaseLiteException { if (body != null) { return body; } else { Database db = revision.getDatabase(); long sequence = getAttachmentSequence(); if (sequence == 0) { throw new CouchbaseLiteException(Status.INTERNAL_SERVER_ERROR); } Attachment attachment = db.getAttachmentForSequence(sequence, this.name); body = attachment.getBodyIfNew(); if (attachment.getGZipped()) { // Client does not expect a gzipped stream. // Only Router handles gzipped streams and uses getAttachmentForSequence directly. 
try { body = new GZIPInputStream(body); } catch (IOException e) { throw new CouchbaseLiteException(e.getMessage(), Status.STATUS_ATTACHMENT_ERROR); } } gzipped = false; return body; } } private long getAttachmentSequence() { long sequence = revision.getSequence(); if (sequence == 0) { sequence = revision.getParentSequence(); } return sequence; } /** * This is just for compatibility with iOS implementation. * * @exclude */ @InterfaceAudience.Private public URL getContentURL(){ try { long sequence = getAttachmentSequence(); if (sequence > 0) { Database db = revision.getDatabase(); //Attachment attachment = db.getAttachmentForSequence(sequence, this.name); String path = db.getAttachmentPathForSequence(sequence, this.name); if (path != null) { return new File(path).toURI().toURL(); } } } catch(Exception e){ Log.d(Log.TAG_DATABASE, e.getMessage()); } return null; } /** * Get the length in bytes of the contents. */ @InterfaceAudience.Public public long getLength() { Number length = (Number) metadata.get("length"); if (length != null) { return length.longValue(); } else { return 0; } } /** * The CouchbaseLite metadata about the attachment, that lives in the document. */ @InterfaceAudience.Public public Map<String, Object> getMetadata() { return Collections.unmodifiableMap(metadata); } @InterfaceAudience.Private /* package */ void setName(String name) { this.name = name; } @InterfaceAudience.Private /* package */ void setRevision(Revision revision) { this.revision = revision; } @InterfaceAudience.Private /* package */ InputStream getBodyIfNew() { return body; } /** * Goes through an _attachments dictionary and replaces any values that are Attachment objects * with proper JSON metadata dicts. It registers the attachment bodies with the blob store and sets * the metadata 'digest' and 'follows' properties accordingly. 
*/ @InterfaceAudience.Private /* package */ static Map<String, Object> installAttachmentBodies(Map<String, Object> attachments, Database database) throws CouchbaseLiteException { Map<String, Object> updatedAttachments = new HashMap<String, Object>(); for (String name : attachments.keySet()) { Object value = attachments.get(name); if (value instanceof Attachment) { Attachment attachment = (Attachment) value; Map<String, Object> metadataMutable = new HashMap<String, Object>(); metadataMutable.putAll(attachment.getMetadata()); InputStream body = attachment.getBodyIfNew(); if (body != null) { // Copy attachment body into the database's blob store: BlobStoreWriter writer; try { writer = blobStoreWriterForBody(body, database); } catch (IOException e) { throw new CouchbaseLiteException(e.getMessage(), Status.STATUS_ATTACHMENT_ERROR); } metadataMutable.put("length", (long)writer.getLength()); metadataMutable.put("digest", writer.mD5DigestString()); metadataMutable.put("follows", true); database.rememberAttachmentWriter(writer); } updatedAttachments.put(name, metadataMutable); } else if (value instanceof AttachmentInternal) { throw new IllegalArgumentException("AttachmentInternal objects not expected here. Could indicate a bug"); } else if (value != null) { updatedAttachments.put(name, value); } } return updatedAttachments; } @InterfaceAudience.Private /* package */ static BlobStoreWriter blobStoreWriterForBody(InputStream body, Database database) throws IOException { BlobStoreWriter writer = database.getAttachmentWriter(); try { writer.read(body); writer.finish(); } catch (IOException e) { writer.cancel(); throw e; } return writer; } /** * NOTE: getGZipped() can return correct value if Attachment is returned from Database.getAttachmentForSequence(long, String). * This method should be internal use only. 
* * @exclude */ @InterfaceAudience.Private public boolean getGZipped() { return gzipped; } /** * @exclude */ @InterfaceAudience.Private public void setGZipped(boolean gzipped) { this.gzipped = gzipped; } }
// Modifications copyright (C) 2017, Baidu.com, Inc. // Copyright 2017 The Apache Software Foundation // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.baidu.palo.common; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.commons.pool2.impl.GenericKeyedObjectPoolConfig; import org.apache.thrift.TException; import org.apache.thrift.TProcessor; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.baidu.palo.thrift.BackendService; import com.baidu.palo.thrift.PaloInternalServiceVersion; import com.baidu.palo.thrift.TAgentPublishRequest; import com.baidu.palo.thrift.TAgentResult; import com.baidu.palo.thrift.TAgentTaskRequest; import com.baidu.palo.thrift.TCancelPlanFragmentParams; import com.baidu.palo.thrift.TCancelPlanFragmentResult; import com.baidu.palo.thrift.TDeleteEtlFilesRequest; import com.baidu.palo.thrift.TExecPlanFragmentParams; import com.baidu.palo.thrift.TExecPlanFragmentResult; import com.baidu.palo.thrift.TExportStatusResult; import com.baidu.palo.thrift.TExportTaskRequest; import com.baidu.palo.thrift.TFetchAllPullLoadTaskInfosResult; import 
com.baidu.palo.thrift.TFetchDataParams; import com.baidu.palo.thrift.TFetchDataResult; import com.baidu.palo.thrift.TFetchPullLoadTaskInfoResult; import com.baidu.palo.thrift.TMiniLoadEtlStatusRequest; import com.baidu.palo.thrift.TMiniLoadEtlStatusResult; import com.baidu.palo.thrift.TMiniLoadEtlTaskRequest; import com.baidu.palo.thrift.TNetworkAddress; import com.baidu.palo.thrift.TPullLoadSubTaskInfo; import com.baidu.palo.thrift.TResultBatch; import com.baidu.palo.thrift.TSnapshotRequest; import com.baidu.palo.thrift.TStatus; import com.baidu.palo.thrift.TTransmitDataParams; import com.baidu.palo.thrift.TTransmitDataResult; import com.baidu.palo.thrift.TUniqueId; public class GenericPoolTest { static GenericPool<BackendService.Client> backendService; static ThriftServer service; static String ip = "127.0.0.1"; static int port = 39401; static void close() { if (service != null) { service.stop(); } } @BeforeClass public static void beforeClass() throws IOException { try { GenericKeyedObjectPoolConfig config = new GenericKeyedObjectPoolConfig(); config.setLifo(true); // set Last In First Out strategy config.setMaxIdlePerKey(2); // (default 8) config.setMinIdlePerKey(0); // (default 0) config.setMaxTotalPerKey(2); // (default 8) config.setMaxTotal(3); // (default -1) config.setMaxWaitMillis(500); // new ClientPool backendService = new GenericPool("BackendService", config, 0); // new ThriftService TProcessor tprocessor = new BackendService.Processor<BackendService.Iface>( new InternalProcessor()); service = new ThriftServer(port, tprocessor); service.start(); } catch (Exception e) { e.printStackTrace(); close(); } } @AfterClass public static void afterClass() throws IOException { close(); } private static class InternalProcessor implements BackendService.Iface { public InternalProcessor() { // } @Override public TExecPlanFragmentResult exec_plan_fragment(TExecPlanFragmentParams params) { return new TExecPlanFragmentResult(); } @Override public 
TCancelPlanFragmentResult cancel_plan_fragment(TCancelPlanFragmentParams params) { return new TCancelPlanFragmentResult(); } @Override public TTransmitDataResult transmit_data(TTransmitDataParams params) { return new TTransmitDataResult(); } @Override public TFetchDataResult fetch_data(TFetchDataParams params) { TFetchDataResult result = new TFetchDataResult(); result.setPacket_num(123); result.setResult_batch(new TResultBatch(new ArrayList<ByteBuffer>(), false, 0)); result.setEos(true); return result; } @Override public TAgentResult submit_tasks(List<TAgentTaskRequest> tasks) throws TException { return null; } @Override public TAgentResult release_snapshot(String snapshot_path) throws TException { return null; } @Override public TAgentResult publish_cluster_state(TAgentPublishRequest request) throws TException { return null; } @Override public TAgentResult submit_etl_task(TMiniLoadEtlTaskRequest request) throws TException { return null; } @Override public TMiniLoadEtlStatusResult get_etl_status(TMiniLoadEtlStatusRequest request) throws TException { return null; } @Override public TAgentResult delete_etl_files(TDeleteEtlFilesRequest request) throws TException { return null; } @Override public TAgentResult make_snapshot(TSnapshotRequest snapshot_request) throws TException { // TODO Auto-generated method stub return null; } @Override public TStatus register_pull_load_task(TUniqueId id, int num_senders) throws TException { // TODO Auto-generated method stub return null; } @Override public TStatus deregister_pull_load_task(TUniqueId id) throws TException { // TODO Auto-generated method stub return null; } @Override public TStatus report_pull_load_sub_task_info(TPullLoadSubTaskInfo task_info) throws TException { // TODO Auto-generated method stub return null; } @Override public TFetchPullLoadTaskInfoResult fetch_pull_load_task_info(TUniqueId id) throws TException { // TODO Auto-generated method stub return null; } @Override public TFetchAllPullLoadTaskInfosResult 
fetch_all_pull_load_task_infos() throws TException { // TODO Auto-generated method stub return null; } @Override public TStatus submit_export_task(TExportTaskRequest request) throws TException { // TODO Auto-generated method stub return null; } @Override public TExportStatusResult get_export_status(TUniqueId task_id) throws TException { // TODO Auto-generated method stub return null; } @Override public TStatus erase_export_task(TUniqueId task_id) throws TException { // TODO Auto-generated method stub return null; } } @Test public void testNormal() throws Exception { TNetworkAddress address = new TNetworkAddress(ip, port); BackendService.Client object = backendService.borrowObject(address); TFetchDataResult result = object.fetch_data(new TFetchDataParams( PaloInternalServiceVersion.V1, new TUniqueId())); Assert.assertEquals(result.getPacket_num(), 123); backendService.returnObject(address, object); } @Test public void testSetMaxPerKey() throws Exception { TNetworkAddress address = new TNetworkAddress(ip, port); BackendService.Client object1; BackendService.Client object2; BackendService.Client object3; // first success object1 = backendService.borrowObject(address); // second success object2 = backendService.borrowObject(address); // third fail, because the max connection is 2 boolean flag = false; try { object3 = backendService.borrowObject(address); } catch (java.util.NoSuchElementException e) { flag = true; // pass } catch (Exception e) { // can't get here Assert.fail(); } Assert.assertTrue(flag); // fouth success, beacuse we drop the object1 backendService.returnObject(address, object1); object3 = null; object3 = backendService.borrowObject(address); Assert.assertTrue(object3 != null); backendService.returnObject(address, object2); backendService.returnObject(address, object3); } @Test public void testException() throws Exception { TNetworkAddress address = new TNetworkAddress(ip, port); BackendService.Client object; // borrow null boolean flag = false; try { 
object = backendService.borrowObject(null); } catch (NullPointerException e) { flag = true; } Assert.assertTrue(flag); flag = false; // return twice object = backendService.borrowObject(address); backendService.returnObject(address, object); try { backendService.returnObject(address, object); } catch (java.lang.IllegalStateException e) { flag = true; } Assert.assertTrue(flag); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.util; import org.apache.calcite.runtime.Utilities; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.primitives.Ints; import org.junit.jupiter.api.Test; import java.nio.LongBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.TreeSet; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; /** * Unit test for {@link org.apache.calcite.util.ImmutableBitSet}. */ class ImmutableBitSetTest { /** Tests the method {@link ImmutableBitSet#iterator()}. 
*/ @Test void testIterator() { assertToIterBitSet("", ImmutableBitSet.of()); assertToIterBitSet("0", ImmutableBitSet.of(0)); assertToIterBitSet("0, 1", ImmutableBitSet.of(0, 1)); assertToIterBitSet("10", ImmutableBitSet.of(10)); } /** * Tests that iterating over an * {@link org.apache.calcite.util.ImmutableBitSet} yields the expected string. * * @param expected Expected string * @param bitSet Bit set */ private void assertToIterBitSet(String expected, ImmutableBitSet bitSet) { StringBuilder buf = new StringBuilder(); for (int i : bitSet) { if (buf.length() > 0) { buf.append(", "); } buf.append(Integer.toString(i)); } assertEquals(expected, buf.toString()); } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#toList()}. */ @Test void testToList() { assertThat(ImmutableBitSet.of().toList(), equalTo(Collections.<Integer>emptyList())); assertThat(ImmutableBitSet.of(5).toList(), equalTo(Arrays.asList(5))); assertThat(ImmutableBitSet.of(3, 5).toList(), equalTo(Arrays.asList(3, 5))); assertThat(ImmutableBitSet.of(63).toList(), equalTo(Arrays.asList(63))); assertThat(ImmutableBitSet.of(64).toList(), equalTo(Arrays.asList(64))); assertThat(ImmutableBitSet.of(3, 63).toList(), equalTo(Arrays.asList(3, 63))); assertThat(ImmutableBitSet.of(3, 64).toList(), equalTo(Arrays.asList(3, 64))); assertThat(ImmutableBitSet.of(0, 4, 2).toList(), equalTo(Arrays.asList(0, 2, 4))); } /** * Tests the method {@link BitSets#range(int, int)}. 
*/ @Test void testRange() { assertEquals(ImmutableBitSet.range(0, 4).toList(), Arrays.asList(0, 1, 2, 3)); assertEquals(ImmutableBitSet.range(1, 4).toList(), Arrays.asList(1, 2, 3)); assertEquals(ImmutableBitSet.range(4).toList(), Arrays.asList(0, 1, 2, 3)); assertEquals(ImmutableBitSet.range(0).toList(), Collections.<Integer>emptyList()); assertEquals(ImmutableBitSet.range(2, 2).toList(), Collections.<Integer>emptyList()); assertThat(ImmutableBitSet.range(63, 66).toString(), equalTo("{63, 64, 65}")); assertThat(ImmutableBitSet.range(65, 68).toString(), equalTo("{65, 66, 67}")); assertThat(ImmutableBitSet.range(65, 65).toString(), equalTo("{}")); assertThat(ImmutableBitSet.range(65, 65).length(), equalTo(0)); assertThat(ImmutableBitSet.range(65, 165).cardinality(), equalTo(100)); // Same tests as above, using a builder. assertThat(ImmutableBitSet.builder().set(63, 66).build().toString(), equalTo("{63, 64, 65}")); assertThat(ImmutableBitSet.builder().set(65, 68).build().toString(), equalTo("{65, 66, 67}")); assertThat(ImmutableBitSet.builder().set(65, 65).build().toString(), equalTo("{}")); assertThat(ImmutableBitSet.builder().set(65, 65).build().length(), equalTo(0)); assertThat(ImmutableBitSet.builder().set(65, 165).build().cardinality(), equalTo(100)); final ImmutableBitSet e0 = ImmutableBitSet.range(0, 0); final ImmutableBitSet e1 = ImmutableBitSet.of(); assertTrue(e0.equals(e1)); assertThat(e0.hashCode(), equalTo(e1.hashCode())); // Empty builder returns the singleton empty set. 
assertTrue(ImmutableBitSet.builder().build() == ImmutableBitSet.of()); } @Test void testCompare() { final List<ImmutableBitSet> sorted = getSortedList(); for (int i = 0; i < sorted.size(); i++) { for (int j = 0; j < sorted.size(); j++) { final ImmutableBitSet set0 = sorted.get(i); final ImmutableBitSet set1 = sorted.get(j); int c = set0.compareTo(set1); if (c == 0) { assertTrue(i == j || i == 3 && j == 4 || i == 4 && j == 3); } else { assertEquals(c, Utilities.compare(i, j)); } assertEquals(c == 0, set0.equals(set1)); assertEquals(c == 0, set1.equals(set0)); } } } @Test void testCompare2() { final List<ImmutableBitSet> sorted = getSortedList(); sorted.sort(ImmutableBitSet.COMPARATOR); assertThat(sorted.toString(), equalTo("[{0, 1, 3}, {0, 1}, {1, 1000}, {1}, {1}, {2, 3}, {}]")); } private List<ImmutableBitSet> getSortedList() { return Arrays.asList( ImmutableBitSet.of(), ImmutableBitSet.of(0, 1), ImmutableBitSet.of(0, 1, 3), ImmutableBitSet.of(1), ImmutableBitSet.of(1), ImmutableBitSet.of(1, 1000), ImmutableBitSet.of(2, 3)); } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#toArray}. */ @Test void testToArray() { int[][] arrays = {{}, {0}, {0, 2}, {1, 65}, {100}}; for (int[] array : arrays) { assertThat(ImmutableBitSet.of(array).toArray(), equalTo(array)); } } /** * Tests the methods * {@link org.apache.calcite.util.ImmutableBitSet#toList}, and * {@link org.apache.calcite.util.ImmutableBitSet#asList} and * {@link org.apache.calcite.util.ImmutableBitSet#asSet}. 
*/ @Test void testAsList() { final List<ImmutableBitSet> list = getSortedList(); // create a set of integers in and not in the lists final Set<Integer> integers = new HashSet<>(); for (ImmutableBitSet set : list) { for (Integer integer : set) { integers.add(integer); integers.add(integer + 1); integers.add(integer + 10); } } for (ImmutableBitSet bitSet : list) { final List<Integer> list1 = bitSet.toList(); final List<Integer> listView = bitSet.asList(); final Set<Integer> setView = bitSet.asSet(); assertThat(list1.size(), equalTo(bitSet.cardinality())); assertThat(listView.size(), equalTo(bitSet.cardinality())); assertThat(setView.size(), equalTo(bitSet.cardinality())); assertThat(list1.toString(), equalTo(listView.toString())); assertThat(list1.toString(), equalTo(setView.toString())); assertTrue(list1.equals(listView)); assertThat(list1.hashCode(), equalTo(listView.hashCode())); final Set<Integer> set = new HashSet<>(list1); assertThat(setView.hashCode(), is(set.hashCode())); assertThat(setView, equalTo(set)); for (Integer integer : integers) { final boolean b = list1.contains(integer); assertThat(listView.contains(integer), is(b)); assertThat(setView.contains(integer), is(b)); } } } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#union(ImmutableBitSet)}. 
*/ @Test void testUnion() { assertThat(ImmutableBitSet.of(1).union(ImmutableBitSet.of(3)).toString(), equalTo("{1, 3}")); assertThat(ImmutableBitSet.of(1).union(ImmutableBitSet.of(3, 100)) .toString(), equalTo("{1, 3, 100}")); ImmutableBitSet x = ImmutableBitSet.of(1) .rebuild() .addAll(ImmutableBitSet.of(2)) .addAll(ImmutableBitSet.of()) .addAll(ImmutableBitSet.of(3)) .build(); assertThat(x.toString(), equalTo("{1, 2, 3}")); } @Test void testIntersect() { assertThat(ImmutableBitSet.of(1, 2, 3, 100, 200) .intersect(ImmutableBitSet.of(2, 100)).toString(), equalTo("{2, 100}")); assertTrue(ImmutableBitSet.of(1, 3, 5, 101, 20001) .intersect(ImmutableBitSet.of(2, 100)) == ImmutableBitSet.of()); } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#contains(org.apache.calcite.util.ImmutableBitSet)}. */ @Test void testBitSetsContains() { assertTrue(ImmutableBitSet.range(0, 5) .contains(ImmutableBitSet.range(2, 4))); assertTrue(ImmutableBitSet.range(0, 5).contains(ImmutableBitSet.range(4))); assertFalse(ImmutableBitSet.range(0, 5).contains(ImmutableBitSet.of(14))); assertFalse(ImmutableBitSet.range(20, 25).contains(ImmutableBitSet.of(14))); final ImmutableBitSet empty = ImmutableBitSet.of(); assertTrue(ImmutableBitSet.range(20, 25).contains(empty)); assertTrue(empty.contains(empty)); assertFalse(empty.contains(ImmutableBitSet.of(0))); assertFalse(empty.contains(ImmutableBitSet.of(1))); assertFalse(empty.contains(ImmutableBitSet.of(63))); assertFalse(empty.contains(ImmutableBitSet.of(64))); assertFalse(empty.contains(ImmutableBitSet.of(1000))); assertTrue(ImmutableBitSet.of(1, 4, 7) .contains(ImmutableBitSet.of(1, 4, 7))); } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#of(org.apache.calcite.util.ImmutableIntList)}. 
*/ @Test void testBitSetOfImmutableIntList() { ImmutableIntList list = ImmutableIntList.of(); assertThat(ImmutableBitSet.of(list), equalTo(ImmutableBitSet.of())); list = ImmutableIntList.of(2, 70, 5, 0); assertThat(ImmutableBitSet.of(list), equalTo(ImmutableBitSet.of(0, 2, 5, 70))); } /** * Tests the method * {@link org.apache.calcite.util.ImmutableBitSet#previousClearBit(int)}. */ @Test void testPreviousClearBit() { assertThat(ImmutableBitSet.of().previousClearBit(10), equalTo(10)); assertThat(ImmutableBitSet.of().previousClearBit(0), equalTo(0)); assertThat(ImmutableBitSet.of().previousClearBit(-1), equalTo(-1)); try { final int actual = ImmutableBitSet.of().previousClearBit(-2); fail("expected exception, got " + actual); } catch (IndexOutOfBoundsException e) { // ok } assertThat(ImmutableBitSet.of(0, 1, 3, 4).previousClearBit(4), equalTo(2)); assertThat(ImmutableBitSet.of(0, 1, 3, 4).previousClearBit(3), equalTo(2)); assertThat(ImmutableBitSet.of(0, 1, 3, 4).previousClearBit(2), equalTo(2)); assertThat(ImmutableBitSet.of(0, 1, 3, 4).previousClearBit(1), equalTo(-1)); assertThat(ImmutableBitSet.of(1, 3, 4).previousClearBit(1), equalTo(0)); } @Test void testBuilder() { assertThat(ImmutableBitSet.builder().set(9) .set(100) .set(1000) .clear(250) .set(88) .clear(100) .clear(1000) .build().toString(), equalTo("{9, 88}")); } /** Unit test for * {@link org.apache.calcite.util.ImmutableBitSet.Builder#build(ImmutableBitSet)}. 
*/ @Test void testBuilderUseOriginal() { final ImmutableBitSet fives = ImmutableBitSet.of(5, 10, 15); final ImmutableBitSet fives1 = fives.rebuild().clear(2).set(10).build(); assertTrue(fives1 == fives); final ImmutableBitSet fives2 = ImmutableBitSet.builder().addAll(fives).clear(2).set(10).build(fives); assertTrue(fives2 == fives); final ImmutableBitSet fives3 = ImmutableBitSet.builder().addAll(fives).clear(2).set(10).build(); assertTrue(fives3 != fives); assertTrue(fives3.equals(fives)); assertTrue(fives3.equals(fives2)); } @Test void testIndexOf() { assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(0), equalTo(0)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(2), equalTo(1)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(3), equalTo(-1)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(4), equalTo(2)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(5), equalTo(-1)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(-1), equalTo(-1)); assertThat(ImmutableBitSet.of(0, 2, 4).indexOf(-2), equalTo(-1)); assertThat(ImmutableBitSet.of().indexOf(-1), equalTo(-1)); assertThat(ImmutableBitSet.of().indexOf(-2), equalTo(-1)); assertThat(ImmutableBitSet.of().indexOf(0), equalTo(-1)); assertThat(ImmutableBitSet.of().indexOf(1000), equalTo(-1)); } /** Tests {@link ImmutableBitSet.Builder#buildAndReset()}. 
 */
  @Test void testReset() {
    // A plain build() consumes the builder: any further use must fail.
    final ImmutableBitSet.Builder builder = ImmutableBitSet.builder();
    builder.set(2);
    assertThat(builder.build().toString(), is("{2}"));
    try {
      builder.set(4);
      fail("expected exception");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), is("can only use builder once"));
    }
    try {
      final ImmutableBitSet bitSet = builder.build();
      fail("expected exception, got " + bitSet);
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), is("can only use builder once"));
    }
    try {
      final ImmutableBitSet bitSet = builder.buildAndReset();
      fail("expected exception, got " + bitSet);
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), is("can only use builder once"));
    }
    // buildAndReset(), by contrast, empties the builder and keeps it usable.
    final ImmutableBitSet.Builder builder2 = ImmutableBitSet.builder();
    builder2.set(2);
    assertThat(builder2.buildAndReset().toString(), is("{2}"));
    assertThat(builder2.buildAndReset().toString(), is("{}"));
    builder2.set(151);
    builder2.set(3);
    assertThat(builder2.buildAndReset().toString(), is("{3, 151}"));
  }

  /** Tests {@code nth}, the ordinal of the n-th set bit, including bits at
   * and beyond the 64-bit word boundaries (63, 64, 128). */
  @Test void testNth() {
    assertThat(ImmutableBitSet.of(0, 2, 4).nth(0), equalTo(0));
    assertThat(ImmutableBitSet.of(0, 2, 4).nth(1), equalTo(2));
    assertThat(ImmutableBitSet.of(0, 2, 4).nth(2), equalTo(4));
    assertThat(ImmutableBitSet.of(0, 2, 63).nth(2), equalTo(63));
    assertThat(ImmutableBitSet.of(0, 2, 64).nth(2), equalTo(64));
    assertThat(ImmutableBitSet.of(64).nth(0), equalTo(64));
    assertThat(ImmutableBitSet.of(64, 65).nth(0), equalTo(64));
    assertThat(ImmutableBitSet.of(64, 65).nth(1), equalTo(65));
    assertThat(ImmutableBitSet.of(64, 128).nth(1), equalTo(128));
    // Out-of-range ordinals (too large, or negative) must throw.
    try {
      ImmutableBitSet.of().nth(0);
      fail("expected throw");
    } catch (IndexOutOfBoundsException e) {
      // ok
    }
    try {
      ImmutableBitSet.of().nth(1);
      fail("expected throw");
    } catch (IndexOutOfBoundsException e) {
      // ok
    }
    try {
      ImmutableBitSet.of(64).nth(1);
      fail("expected throw");
    } catch (IndexOutOfBoundsException e) {
      // ok
    }
    try {
      ImmutableBitSet.of(64).nth(-1);
      fail("expected throw");
    } catch (IndexOutOfBoundsException e) {
      // ok
    }
  }

  /** Tests the method
   * {@link org.apache.calcite.util.BitSets#closure(java.util.SortedMap)}. */
  @Test void testClosure() {
    final SortedMap<Integer, ImmutableBitSet> empty = new TreeMap<>();
    assertThat(ImmutableBitSet.closure(empty), equalTo(empty));

    // Currently you need an entry for each position, otherwise you get an NPE.
    // We should fix that.
    final SortedMap<Integer, ImmutableBitSet> map = new TreeMap<>();
    map.put(0, ImmutableBitSet.of(3));
    map.put(1, ImmutableBitSet.of());
    map.put(2, ImmutableBitSet.of(7));
    map.put(3, ImmutableBitSet.of(4, 12));
    map.put(4, ImmutableBitSet.of());
    map.put(5, ImmutableBitSet.of());
    map.put(6, ImmutableBitSet.of());
    map.put(7, ImmutableBitSet.of());
    map.put(8, ImmutableBitSet.of());
    map.put(9, ImmutableBitSet.of());
    map.put(10, ImmutableBitSet.of());
    map.put(11, ImmutableBitSet.of());
    map.put(12, ImmutableBitSet.of());
    final String original = map.toString();
    final String expected = "{0={3, 4, 12}, 1={}, 2={7}, 3={3, 4, 12}, 4={4, 12}, 5={}, 6={}, 7={7}, 8={}, 9={}, 10={}, 11={}, 12={4, 12}}";
    assertThat(ImmutableBitSet.closure(map).toString(), equalTo(expected));
    // closure() must not mutate its argument.
    assertThat("argument modified", map.toString(), equalTo(original));

    // Now a similar map with missing entries. Same result.
    final SortedMap<Integer, ImmutableBitSet> map2 = new TreeMap<>();
    map2.put(0, ImmutableBitSet.of(3));
    map2.put(2, ImmutableBitSet.of(7));
    map2.put(3, ImmutableBitSet.of(4, 12));
    map2.put(9, ImmutableBitSet.of());
    final String original2 = map2.toString();
    assertThat(ImmutableBitSet.closure(map2).toString(), equalTo(expected));
    assertThat("argument modified", map2.toString(), equalTo(original2));
  }

  /** Tests {@code powerSet}: the set of all subsets; 17 bits gives
   * 2^17 = 131072 subsets. */
  @Test void testPowerSet() {
    final ImmutableBitSet empty = ImmutableBitSet.of();
    assertThat(Iterables.size(empty.powerSet()), equalTo(1));
    assertThat(empty.powerSet().toString(), equalTo("[{}]"));
    final ImmutableBitSet single = ImmutableBitSet.of(2);
    assertThat(Iterables.size(single.powerSet()), equalTo(2));
    assertThat(single.powerSet().toString(), equalTo("[{}, {2}]"));
    final ImmutableBitSet two = ImmutableBitSet.of(2, 10);
    assertThat(Iterables.size(two.powerSet()), equalTo(4));
    assertThat(two.powerSet().toString(), equalTo("[{}, {10}, {2}, {2, 10}]"));
    final ImmutableBitSet seventeen = ImmutableBitSet.range(3, 20);
    assertThat(Iterables.size(seventeen.powerSet()), equalTo(131072));
  }

  /** Tests {@code valueOf(long...)}: 0xA is binary 1010, i.e. bits 1 and 3;
   * a bit in the third 64-bit word lands at 128 + n. */
  @Test void testCreateLongs() {
    assertThat(ImmutableBitSet.valueOf(0L), equalTo(ImmutableBitSet.of()));
    assertThat(ImmutableBitSet.valueOf(0xAL), equalTo(ImmutableBitSet.of(1, 3)));
    assertThat(ImmutableBitSet.valueOf(0xAL, 0, 0), equalTo(ImmutableBitSet.of(1, 3)));
    assertThat(ImmutableBitSet.valueOf(0, 0, 0xAL, 0), equalTo(ImmutableBitSet.of(129, 131)));
  }

  /** Tests {@code valueOf(LongBuffer)}; same word layout as the long[] overload. */
  @Test void testCreateLongBuffer() {
    assertThat(ImmutableBitSet.valueOf(LongBuffer.wrap(new long[] {})), equalTo(ImmutableBitSet.of()));
    assertThat(ImmutableBitSet.valueOf(LongBuffer.wrap(new long[] {0xAL})), equalTo(ImmutableBitSet.of(1, 3)));
    assertThat(
        ImmutableBitSet.valueOf(LongBuffer.wrap(new long[] {0, 0, 0xAL, 0})),
        equalTo(ImmutableBitSet.of(129, 131)));
  }

  /** Tests that {@code toLongArray} round-trips through both
   * {@code valueOf} overloads. */
  @Test void testToLongArray() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    assertThat(ImmutableBitSet.valueOf(bitSet.toLongArray()), equalTo(bitSet));
    assertThat(ImmutableBitSet.valueOf(LongBuffer.wrap(bitSet.toLongArray())), equalTo(bitSet));
  }

  /** Tests {@code set} and {@code setIf}; setting an already-set bit is a
   * no-op that yields an equal set. */
  @Test void testSet() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    final ImmutableBitSet bitSet2 = ImmutableBitSet.of(29, 4, 1969, 30);
    assertThat(bitSet.set(30), equalTo(bitSet2));
    assertThat(bitSet.set(30).set(30), equalTo(bitSet2));
    assertThat(bitSet.set(29), equalTo(bitSet));
    assertThat(bitSet.setIf(30, false), equalTo(bitSet));
    assertThat(bitSet.setIf(30, true), equalTo(bitSet2));
  }

  /** Tests {@code clear} and {@code clearIf}; clearing every bit yields the
   * empty set. */
  @Test void testClear() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    final ImmutableBitSet bitSet2 = ImmutableBitSet.of(4, 1969);
    assertThat(bitSet.clear(29), equalTo(bitSet2));
    assertThat(bitSet.clear(29).clear(29), equalTo(bitSet2));
    assertThat(bitSet.clear(29).clear(4).clear(29).clear(1969), equalTo(ImmutableBitSet.of()));
    assertThat(bitSet.clearIf(29, false), equalTo(bitSet));
    assertThat(bitSet.clearIf(29, true), equalTo(bitSet2));
  }

  /** Tests the two-argument {@code set(int, boolean)}; a no-op change must
   * return the same instance, not just an equal one. */
  @Test void testSet2() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    final ImmutableBitSet bitSet2 = ImmutableBitSet.of(29, 4, 1969, 30);
    assertThat(bitSet.set(30, false), sameInstance(bitSet));
    assertThat(bitSet.set(30, true), equalTo(bitSet2));
    assertThat(bitSet.set(29, true), sameInstance(bitSet));
  }

  /** Tests {@code shift}; shifting below bit 0 throws. */
  @Test void testShift() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    assertThat(bitSet.shift(0), is(bitSet));
    assertThat(bitSet.shift(1), is(ImmutableBitSet.of(30, 5, 1970)));
    assertThat(bitSet.shift(-4), is(ImmutableBitSet.of(25, 0, 1965)));
    try {
      final ImmutableBitSet x = bitSet.shift(-5);
      fail("Expected error, got " + x);
    } catch (ArrayIndexOutOfBoundsException e) {
      // Exception message text differs between JDK versions; accept both.
      assertThat(e.getMessage(), anyOf(is("-1"), is("Index -1 out of bounds for length 0")));
    }
    final ImmutableBitSet empty = ImmutableBitSet.of();
    assertThat(empty.shift(-100), is(empty));
  }

  /** Tests the range view {@code get(fromIndex, toIndex)}, including empty
   * ranges, ranges past the last bit, and the empty set. */
  @Test void testGet2() {
    final ImmutableBitSet bitSet = ImmutableBitSet.of(29, 4, 1969);
    assertThat(bitSet.get(0, 8), is(ImmutableBitSet.of(4)));
    assertThat(bitSet.get(0, 5), is(ImmutableBitSet.of(4)));
    assertThat(bitSet.get(0, 4), is(ImmutableBitSet.of()));
    assertThat(bitSet.get(4, 4), is(ImmutableBitSet.of()));
    assertThat(bitSet.get(5, 5), is(ImmutableBitSet.of()));
    assertThat(bitSet.get(4, 5), is(ImmutableBitSet.of(4)));
    assertThat(bitSet.get(4, 1000), is(ImmutableBitSet.of(4, 29)));
    assertThat(bitSet.get(4, 32), is(ImmutableBitSet.of(4, 29)));
    assertThat(bitSet.get(2000, 10000), is(ImmutableBitSet.of()));
    assertThat(bitSet.get(1000, 10000), is(ImmutableBitSet.of(1969)));
    assertThat(bitSet.get(5, 10000), is(ImmutableBitSet.of(29, 1969)));
    assertThat(bitSet.get(65, 10000), is(ImmutableBitSet.of(1969)));
    final ImmutableBitSet emptyBitSet = ImmutableBitSet.of();
    assertThat(emptyBitSet.get(0, 4), is(ImmutableBitSet.of()));
    assertThat(emptyBitSet.get(0, 0), is(ImmutableBitSet.of()));
    assertThat(emptyBitSet.get(0, 10000), is(ImmutableBitSet.of()));
    assertThat(emptyBitSet.get(7, 10000), is(ImmutableBitSet.of()));
    assertThat(emptyBitSet.get(73, 10000), is(ImmutableBitSet.of()));
  }

  /**
   * Test case for {@link ImmutableBitSet#allContain(Collection, int)}.
   */
  @Test void testAllContain() {
    ImmutableBitSet set1 = ImmutableBitSet.of(0, 1, 2, 3);
    ImmutableBitSet set2 = ImmutableBitSet.of(2, 3, 4, 5);
    ImmutableBitSet set3 = ImmutableBitSet.of(3, 4, 5, 6);
    Collection<ImmutableBitSet> collection1 = ImmutableList.of(set1, set2, set3);
    // 3 is the only bit present in all three sets.
    assertTrue(ImmutableBitSet.allContain(collection1, 3));
    assertFalse(ImmutableBitSet.allContain(collection1, 0));
    Collection<ImmutableBitSet> collection2 = ImmutableList.of(set1, set2);
    assertTrue(ImmutableBitSet.allContain(collection2, 2));
    assertTrue(ImmutableBitSet.allContain(collection2, 3));
    assertFalse(ImmutableBitSet.allContain(collection2, 4));
  }

  /** Test case for
   * {@link org.apache.calcite.util.ImmutableBitSet#toImmutableBitSet()}.
   */
  @Test void testCollector() {
    checkCollector(0, 20);
    checkCollector();
    checkCollector(1, 63);
    checkCollector(1, 63, 1); // duplicates collapse
    checkCollector(0, 257);
    checkCollector(1024, 257);
  }

  /** Collects the given integers via the stream collector and checks the
   * result against a sorted, de-duplicated copy of the input. */
  private void checkCollector(int... integers) {
    final List<Integer> list = Ints.asList(integers);
    final List<Integer> sortedUniqueList = new ArrayList<>(new TreeSet<>(list));
    final ImmutableBitSet bitSet = list.stream().collect(ImmutableBitSet.toImmutableBitSet());
    assertThat(bitSet.asList(), is(sortedUniqueList));
  }
}
package apple.uikit;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSDictionary;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSUserActivity;
import apple.foundation.protocol.NSSecureCoding;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * Java binding for UIKit's {@code UISceneSession} Objective-C class
 * ({@code @Library("UIKit")}). Each {@code native} method is dispatched to
 * the Objective-C selector named in its {@code @Selector} annotation by the
 * NatJ runtime; the {@code @Generated} annotations indicate this file is
 * machine-generated binding code, so it should not be edited by hand
 * (NOTE(review): assumption based on the annotations — confirm against the
 * generator setup).
 */
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UISceneSession extends NSObject implements NSSecureCoding {
    static {
        // Register this binding class with the NatJ runtime at load time.
        NatJ.register();
    }

    // Wraps an existing native peer; instances come from the ObjC runtime.
    @Generated
    protected UISceneSession(Pointer peer) {
        super(peer);
    }

    // --- Standard NSObject class-method bindings (inherited selectors) ---

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native UISceneSession alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native UISceneSession allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    // --- UISceneSession instance properties ---

    // The scene configuration this session was created with.
    @Generated
    @Selector("configuration")
    public native UISceneConfiguration configuration();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    // NSSecureCoding conformance.
    @Generated
    @Selector("encodeWithCoder:")
    public native void encodeWithCoder(NSCoder coder);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native UISceneSession init();

    @Generated
    @Selector("initWithCoder:")
    public native UISceneSession initWithCoder(NSCoder coder);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // "new" is a Java keyword, hence the _objc suffix.
    @Generated
    @Owned
    @Selector("new")
    public static native UISceneSession new_objc();

    // Stable identifier for this session across app launches.
    @Generated
    @Selector("persistentIdentifier")
    public native String persistentIdentifier();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    // The UISceneSessionRole of this session.
    @Generated
    @Selector("role")
    public native String role();

    /**
     * If already instantiated, the UIScene instance that is represented by this definition.
     */
    @Generated
    @Selector("scene")
    public native UIScene scene();

    /**
     * The initial value of stateRestorationActivity may not be immediately available when the scene
     * is connected, depending on the app's default protection class and the lock state of the device.
     */
    @Generated
    @Selector("setStateRestorationActivity:")
    public native void setStateRestorationActivity(NSUserActivity value);

    /**
     * objects must be plist types
     */
    @Generated
    @Selector("setUserInfo:")
    public native void setUserInfo(NSDictionary<String, ?> value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * The initial value of stateRestorationActivity may not be immediately available when the scene
     * is connected, depending on the app's default protection class and the lock state of the device.
     */
    @Generated
    @Selector("stateRestorationActivity")
    public native NSUserActivity stateRestorationActivity();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    // Protocol-method convenience wrapper; simply forwards to the
    // static supportsSecureCoding() binding above.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    /**
     * objects must be plist types
     */
    @Generated
    @Selector("userInfo")
    public native NSDictionary<String, ?> userInfo();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.access; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.PipedReader; import java.io.PipedWriter; import java.net.URLDecoder; import java.text.ParseException; import java.util.Date; import java.util.Enumeration; import java.util.Hashtable; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.HttpHeaders; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import org.apache.http.HttpStatus; import org.fcrepo.common.Constants; import org.fcrepo.server.Context; import org.fcrepo.server.ReadOnlyContext; import org.fcrepo.server.errors.DatastreamNotFoundException; import org.fcrepo.server.errors.DisseminationException; import org.fcrepo.server.errors.GeneralException; import org.fcrepo.server.errors.MethodNotFoundException; import org.fcrepo.server.errors.ObjectNotFoundException; import org.fcrepo.server.errors.ObjectNotInLowlevelStorageException; import org.fcrepo.server.errors.ServerException; import org.fcrepo.server.errors.StreamIOException; import org.fcrepo.server.errors.authorization.AuthzException; import org.fcrepo.server.errors.servletExceptionExtensions.InternalError500Exception; import org.fcrepo.server.errors.servletExceptionExtensions.NotFound404Exception; import org.fcrepo.server.errors.servletExceptionExtensions.RootException; import org.fcrepo.server.storage.types.MIMETypedStream; import org.fcrepo.server.storage.types.Property; import org.fcrepo.server.utilities.StreamUtility; 
import org.fcrepo.utilities.DateUtility; import org.fcrepo.utilities.XmlTransformUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Implements the three methods GetObjectProfile, GetDissemination, and * GetDatastreamDissemination of the Fedora Access LITE (API-A-LITE) interface * using a java servlet front end. The syntax defined by API-A-LITE defines * three bindings for these methods: * * GetDissemination URL syntax: * <p> * protocol://hostname:port/fedora/get/PID/sDefPID/methodName[/dateTime][? * parmArray] * </p> * <p> * This syntax requests a dissemination of the specified object using the * specified method of the associated service definition object. The result is * returned as a MIME-typed stream. * </p> * <ul> * <li>protocol - either http or https.</li> * <li>hostname - required hostname of the Fedora server.</li> * <li>port - required port number on which the Fedora server is running.</li> * <li>fedora - required path name for the Fedora access service.</li> * <li>get - required path name for the Fedora service.</li> * <li>PID - required persistent idenitifer of the digital object.</li> * <li>sDefPID - required persistent identifier of the service definition object * to which the digital object subscribes.</li> * <li>methodName - required name of the method to be executed.</li> * <li>dateTime - optional dateTime value indicating dissemination of a version * of the digital object at the specified point in time. * <li>parmArray - optional array of method parameters consisting of name/value * pairs in the form parm1=value1&amp;parm2=value2...</li> * </ul> * * GetObjectProfile URL syntax: * <p> * protocol://hostname:port/fedora/get/PID[/dateTime][?xml=BOOLEAN] * </p> * <p> * This syntax requests an object profile for the specified digital object. The * xml parameter determines the type of output returned. 
If the parameter is * omitted or has a value of "false", a MIME-typed stream consisting of an html * table is returned providing a browser-savvy means of viewing the object * profile. If the value specified is "true", then a MIME-typed stream * consisting of XML is returned. * </p> * <ul> * <li>protocol - either http or https</li> * <li>hostname - required hostname of the Fedora server.</li> * <li>port - required port number on which the Fedora server is running.</li> * <li>fedora - required name of the Fedora access service.</li> * <li>get - required verb of the Fedora service.</li> * <li>PID - required persistent identifier of the digital object.</li> * <li>dateTime - optional dateTime value indicating dissemination of a version * of the digital object at the specified point in time. * <li>xml - an optional parameter indicating the requested output format. A * value of "true" indicates a return type of text/xml; the absence of the xml * parameter or a value of "false" indicates format is to be text/html.</li> * </ul> * * GetDatastreamDissemination URL syntax: * <p> * protocol://hostname:port/fedora/get/PID/DSID[/dateTime] * </p><p> * This syntax requests a datastream dissemination for the specified digital * object. It is used to return the contents of a datastream. * </p> * <ul> * <li>protocol - either http or https.</li> * <li>hostname - required hostname of the Fedora server.</li> * <li>port - required port number on which the Fedora server is running.</li> * <li>fedora - required name of the Fedora access service.</li> * <li>get - required verb of the Fedora service.</li> * <li>PID - required persistent identifier of the digital object.</li> * <li>DSID - required datastream identifier for the datastream.</li> * <li>dateTime - optional dateTime value indicating dissemination of a version * of the digital object at the specified point in time. 
* </ul> * * @author Ross Wayland */ public class FedoraAccessServlet extends SpringAccessServlet implements Constants { private static final Logger logger = LoggerFactory.getLogger(FedoraAccessServlet.class); private static final long serialVersionUID = 1L; /** Content type for html. */ private static final String CONTENT_TYPE_HTML = "text/html; charset=UTF-8"; /** Content type for xml. */ private static final String CONTENT_TYPE_XML = "text/xml; charset=UTF-8"; /** 4K Buffer */ private final static int BUF = 4096; /** * <p> * Process Fedora Access Request. Parse and validate the servlet input * parameters and then execute the specified request. * </p> * * @param request * The servlet request. * @param response * servlet The servlet response. * @throws ServletException * If an error occurs that effects the servlet's basic operation. * @throws IOException * If an error occurrs with an input or output operation. */ @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String PID = null; String sDefPID = null; String methodName = null; String dsID = null; Date asOfDateTime = null; Date versDateTime = null; Property[] userParms = null; boolean isGetObjectProfileRequest = false; boolean isGetDisseminationRequest = false; boolean isGetDatastreamDisseminationRequest = false; boolean xml = false; // Portion of initial request URL from protocol up to query string // Moved to a local variable to be thread-safe String requestURI = request.getQueryString() != null ? request.getRequestURL().toString() + "?" + request.getQueryString() : request.getRequestURL().toString(); logger.info("Got request: {}", requestURI); // Parse servlet URL. 
// For the Fedora API-A-LITE "get" syntax, valid entries include: // // For dissemination requests: // http://host:port/fedora/get/pid/sDefPid/methodName // http://host:port/fedora/get/pid/sDefPid/methodName/timestamp // http://host:port/fedora/get/pid/sDefPid/methodName?parm=value[&parm=value] // http://host:port/fedora/get/pid/sDefPid/methodName/timestamp?parm=value[&parm=value] // // For object profile requests: // http://host:port/fedora/get/pid // http://host:port/fedora/get/pid/timestamp // // For datastream dissemination requests: // http://host:port/fedora/get/pid/dsID // http://host:port/fedora/get/pid/dsID/timestamp // // use substring to avoid an additional char array copy String[] URIArray = requestURI.substring(0, request.getRequestURL().length()).split("/"); if (URIArray.length == 6 || URIArray.length == 7) { // Request is either an ObjectProfile request or a datastream // request if (URIArray.length == 7) { // They either specified a date/time or a datastream id. if (URIArray[6].indexOf(":") == -1) { // If it doesn't contain a colon, they were after a // datastream, // so this is a DatastreamDissemination request dsID = URLDecoder.decode(URIArray[6], "UTF-8"); isGetDatastreamDisseminationRequest = true; } else { // If it DOES contain a colon, they were after a // date/time-stamped object profile try { versDateTime = DateUtility.parseDateStrict(URIArray[6]); } catch (ParseException e) { String message = "ObjectProfile Request Syntax Error: DateTime value " + "of \"" + URIArray[6] + "\" is not a valid DateTime format. " + " <br></br> The expected format for DateTime is \"" + "YYYY-MM-DDTHH:MM:SS.SSSZ\". " + " <br></br> The expected syntax for " + "ObjectProfile requests is: \"" + URIArray[0] + "//" + URIArray[2] + "/" + URIArray[3] + "/" + URIArray[4] + "/PID[/dateTime] \" ." + " <br></br> Submitted request was: \"" + requestURI + "\" . 
"; logger.warn(message); throw new ServletException("from FedoraAccessServlet" + message); /* * commented out for exception.jsp test * response.setStatus * (HttpServletResponse.SC_INTERNAL_SERVER_ERROR); * response * .sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR * , message); return; commented out for exception.jsp * test */ } asOfDateTime = versDateTime; isGetObjectProfileRequest = true; } } else { // URIArray.length==6 so this is a GetObjectProfile request isGetObjectProfileRequest = true; } } else if (URIArray.length > 7) { // Request is either dissemination request or timestamped get // datastream request methodName = URLDecoder.decode(URIArray[7], "UTF-8"); if (URIArray.length == 8) { if (URIArray[6].indexOf(":") == -1) { // If it doesn't contain a colon, they were after a // timestamped // datastream, so this is a GetDatastreamDissemination // request. dsID = URLDecoder.decode(URIArray[6], "UTF-8"); try { versDateTime = DateUtility.parseDateStrict(URIArray[7]); } catch (ParseException e) { String message = "GetDatastreamDissemination Request Syntax Error: DateTime value " + "of \"" + URIArray[7] + "\" is not a valid DateTime format. " + " <br></br> The expected format for DateTime is \"" + "YYYY-MM-DDTHH:MM:SS.SSSZ\". " + " <br></br> The expected syntax for GetDatastreamDissemination requests is: \"" + URIArray[0] + "//" + URIArray[2] + "/" + URIArray[3] + "/" + URIArray[4] + "/PID/dsID[/dateTime] \" " + " <br></br> Submitted request was: \"" + requestURI + "\" . "; logger.warn(message); throw new ServletException("from FedoraAccessServlet" + message); } asOfDateTime = versDateTime; isGetDatastreamDisseminationRequest = true; } else { isGetDisseminationRequest = true; } } else if (URIArray.length == 9) { try { versDateTime = DateUtility.parseDateStrict(URIArray[8]); } catch (ParseException e) { String message = "Dissemination Request Syntax Error: DateTime value " + "of \"" + URIArray[8] + "\" is not a valid DateTime format. 
" + " <br></br> The expected format for DateTime is \"" + "YYYY-MM-DDTHH:MM:SS.SSS\". " + " <br></br> The expected syntax for Dissemination requests is: \"" + URIArray[0] + "//" + URIArray[2] + "/" + URIArray[3] + "/" + URIArray[4] + "/PID/sDefPID/methodName[/dateTime][?ParmArray] \" " + " <br></br> Submitted request was: \"" + requestURI + "\" . "; logger.warn(message); throw new ServletException("from FedoraAccessServlet" + message); /* * commented out for exception.jsp test * response.setStatus(HttpServletResponse * .SC_INTERNAL_SERVER_ERROR); * response.sendError(HttpServletResponse * .SC_INTERNAL_SERVER_ERROR, message); return; commented * out for exception.jsp test */ } asOfDateTime = versDateTime; isGetDisseminationRequest = true; } if (URIArray.length > 9) { String message = "Dissemination Request Syntax Error: The expected " + "syntax for Dissemination requests is: \"" + URIArray[0] + "//" + URIArray[2] + "/" + URIArray[3] + "/" + URIArray[4] + "/PID/sDefPID/methodName[/dateTime][?ParmArray] \" " + " <br></br> Submitted request was: \"" + requestURI + "\" . "; logger.warn(message); throw new ServletException("from FedoraAccessServlet" + message); /* * commented out for exception.jsp test * response.setStatus(HttpServletResponse * .SC_INTERNAL_SERVER_ERROR); * response.sendError(HttpServletResponse * .SC_INTERNAL_SERVER_ERROR, message); return; commented out * for exception.jsp test */ } } else { // Bad syntax; redirect to syntax documentation page. 
response .sendRedirect("/userdocs/client/browser/apialite/index.html"); return; } // Separate out servlet parameters from method parameters Hashtable<String, String> h_userParms = new Hashtable<String, String>(); for (Enumeration<?> e = request.getParameterNames(); e .hasMoreElements();) { String name = URLDecoder.decode((String) e.nextElement(), "UTF-8"); if (isGetObjectProfileRequest && name.equalsIgnoreCase("xml")) { xml = Boolean.parseBoolean(request.getParameter(name)); } else { String value = URLDecoder.decode(request.getParameter(name), "UTF-8"); h_userParms.put(name, value); } } // API-A interface requires user-supplied parameters to be of type // Property[] so create Property[] from hashtable of user parameters. int userParmCounter = 0; userParms = new Property[h_userParms.size()]; for (Enumeration<String> e = h_userParms.keys(); e.hasMoreElements();) { Property userParm = new Property(); userParm.name = e.nextElement(); userParm.value = h_userParms.get(userParm.name); userParms[userParmCounter] = userParm; userParmCounter++; } PID = URIArray[5]; String actionLabel = "Access"; try { if (isGetObjectProfileRequest) { logger.debug("Servicing getObjectProfile request (PID={}, asOfDate={})", PID, versDateTime); Context context = ReadOnlyContext.getContext(HTTP_REQUEST.REST.uri, request); getObjectProfile(context, PID, asOfDateTime, xml, request, response); logger.debug("Finished servicing getObjectProfile request"); } else if (isGetDisseminationRequest) { sDefPID = URIArray[6]; logger.debug("Servicing getDissemination request (PID={}, sDefPID={}, methodName={}, asOfDate={})", PID, sDefPID, methodName, versDateTime); Context context = ReadOnlyContext.getContext(HTTP_REQUEST.REST.uri, request); getDissemination(context, PID, sDefPID, methodName, userParms, asOfDateTime, response, request); logger.debug("Finished servicing getDissemination request"); } else if (isGetDatastreamDisseminationRequest) { logger.debug("Servicing getDatastreamDissemination request " + 
"(PID={}, dsID={}, asOfDate={})", PID, dsID, versDateTime); Context context = ReadOnlyContext.getContext(HTTP_REQUEST.REST.uri, request); getDatastreamDissemination(context, PID, dsID, asOfDateTime, response, request); logger.debug("Finished servicing getDatastreamDissemination " + "request"); } } catch (MethodNotFoundException e) { logger.error("Method not found for request: " + requestURI + " (actionLabel=" + actionLabel + ")", e); throw new NotFound404Exception("", e, request, actionLabel, e .getMessage(), EMPTY_STRING_ARRAY); } catch (DatastreamNotFoundException e) { logger.error("Datastream not found for request: " + requestURI + " (actionLabel=" + actionLabel + ")", e); throw new NotFound404Exception("", e, request, actionLabel, e .getMessage(), EMPTY_STRING_ARRAY); } catch (ObjectNotFoundException e) { logger.error("Object not found for request: " + requestURI + " (actionLabel=" + actionLabel + ")", e); throw new NotFound404Exception("", e, request, actionLabel, e .getMessage(), EMPTY_STRING_ARRAY); } catch (DisseminationException e) { logger.error("Dissemination failed: " + requestURI + " (actionLabel=" + actionLabel + ")", e); throw new NotFound404Exception("", e, request, actionLabel, e .getMessage(), EMPTY_STRING_ARRAY); } catch (ObjectNotInLowlevelStorageException e) { logger.error("Object or datastream not found for request: " + requestURI + " (actionLabel=" + actionLabel + ")", e); throw new NotFound404Exception("", e, request, actionLabel, e .getMessage(), EMPTY_STRING_ARRAY); } catch (AuthzException ae) { logger.error("Authorization failed for request: " + requestURI + " (actionLabel=" + actionLabel + ")", ae); throw RootException.getServletException(ae, request, actionLabel, EMPTY_STRING_ARRAY); } catch (Throwable th) { logger.error("Unexpected error servicing API-A request", th); throw new InternalError500Exception("", th, request, actionLabel, "", EMPTY_STRING_ARRAY); } } public void getObjectProfile(Context context, String PID, Date 
asOfDateTime, boolean xml, HttpServletRequest request, HttpServletResponse response) throws ServerException { OutputStreamWriter out = null; Date versDateTime = asOfDateTime; ObjectProfile objProfile = null; PipedWriter pw = null; PipedReader pr = null; try { pw = new PipedWriter(); pr = new PipedReader(pw); objProfile = m_access.getObjectProfile(context, PID, asOfDateTime); if (objProfile != null) { // Object Profile found. // Serialize the ObjectProfile object into XML new ProfileSerializerThread(context, PID, objProfile, versDateTime, pw).start(); if (xml) { // Return results as raw XML response.setContentType(CONTENT_TYPE_XML); // Insures stream read from PipedReader correctly translates // utf-8 // encoded characters to OutputStreamWriter. out = new OutputStreamWriter(response.getOutputStream(), "UTF-8"); char[] buf = new char[BUF]; int len = 0; while ((len = pr.read(buf, 0, BUF)) != -1) { out.write(buf, 0, len); } out.flush(); } else { // Transform results into an html table response.setContentType(CONTENT_TYPE_HTML); out = new OutputStreamWriter(response.getOutputStream(), "UTF-8"); File xslFile = new File(m_server.getHomeDir(), "access/viewObjectProfile.xslt"); Templates template = XmlTransformUtility.getTemplates(xslFile); Transformer transformer = template.newTransformer(); transformer.setParameter("fedora", context .getEnvironmentValue(FEDORA_APP_CONTEXT_NAME)); transformer.transform(new StreamSource(pr), new StreamResult(out)); } out.flush(); } else { throw new GeneralException("No object profile returned"); } } catch (ServerException e) { throw e; } catch (Throwable th) { String message = "Error getting object profile"; logger.error(message, th); throw new GeneralException(message, th); } finally { try { if (pr != null) { pr.close(); } if (out != null) { out.close(); } } catch (Throwable th) { String message = "Error closing output"; logger.error(message, th); throw new StreamIOException(message); } } } public void getDatastreamDissemination(Context 
// --- continuation of getDatastreamDissemination(Context, ...) whose signature
// opens on the previous line: fetches the dissemination of datastream dsID of
// object PID (optionally at asOfDateTime) and copies it to the response.
context, String PID, String dsID, Date asOfDateTime,
        HttpServletResponse response, HttpServletRequest request)
        throws IOException, ServerException {
    ServletOutputStream out = null;
    MIMETypedStream dissemination = null;
    // May throw ServerException before the try; nothing to close at that point.
    dissemination =
            m_access.getDatastreamDissemination(context, PID, dsID, asOfDateTime);
    try {
        // testing to see what's in request header that might be of interest
        if (logger.isDebugEnabled()) {
            for (Enumeration<?> e = request.getHeaderNames(); e
                    .hasMoreElements();) {
                String name = (String) e.nextElement();
                Enumeration<?> headerValues = request.getHeaders(name);
                StringBuffer sb = new StringBuffer();
                while (headerValues.hasMoreElements()) {
                    sb.append((String) headerValues.nextElement());
                }
                String value = sb.toString();
                logger.debug("FEDORASERVLET REQUEST HEADER CONTAINED: {} : {}",
                        name, value);
            }
        }
        // Dissemination was successful;
        // Return MIMETypedStream back to browser client
        if (dissemination.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
            // Upstream answered "moved": forward its Location header as a
            // client redirect instead of streaming a body.
            String location = "";
            for (Property prop : dissemination.header) {
                if (prop.name.equalsIgnoreCase(HttpHeaders.LOCATION)) {
                    location = prop.value;
                    break;
                }
            }
            response.sendRedirect(location);
        } else {
            int status = dissemination.getStatusCode();
            response.setStatus(status);
            if (status == HttpStatus.SC_OK) {
                response.setContentType(dissemination.getMIMEType());
            }
            // Mirror upstream headers, except transfer-encoding and
            // content-type, which the container / the code above manage.
            Property[] headerArray = dissemination.header;
            if (headerArray != null) {
                for (int i = 0; i < headerArray.length; i++) {
                    if (headerArray[i].name != null
                            && !headerArray[i].name
                                    .equalsIgnoreCase("transfer-encoding")
                            && !headerArray[i].name
                                    .equalsIgnoreCase("content-type")) {
                        response.addHeader(headerArray[i].name,
                                headerArray[i].value);
                        logger.debug(
                                "THIS WAS ADDED TO FEDORASERVLET RESPONSE HEADER FROM ORIGINATING PROVIDER {} : {}",
                                headerArray[i].name, headerArray[i].value);
                    }
                }
            }
            out = response.getOutputStream();
            int byteStream = 0;
            logger.debug("Started reading dissemination stream");
            InputStream dissemResult = dissemination.getStream();
            byte[] buffer = new byte[BUF];
            while ((byteStream = dissemResult.read(buffer)) != -1) {
                out.write(buffer, 0, byteStream);
            }
            buffer = null;
            dissemResult.close();
            dissemResult = null;
            out.flush();
            out.close();
            // NOTE(review): dissemResult and out are closed only on the
            // success path; an IOException mid-copy leaves them open (only
            // dissemination.close() runs in the finally below).
            logger.debug("Finished reading dissemination stream");
        }
    } finally {
        dissemination.close();
    }
}

/**
 * <p>
 * This method calls the Fedora Access Subsystem to retrieve a MIME-typed
 * stream corresponding to the dissemination request.
 * </p>
 *
 * @param context
 *        The read only context of the request.
 * @param PID
 *        The persistent identifier of the Digital Object.
 * @param sDefPID
 *        The persistent identifier of the Service Definition object.
 * @param methodName
 *        The method name.
 * @param userParms
 *        An array of user-supplied method parameters.
 * @param asOfDateTime
 *        The version datetime stamp of the digital object.
 * @param response
 *        The servlet response.
 * @param request
 *        The servlet request.
 * @throws IOException
 *         If an error occurs with an input or output operation.
 * @throws ServerException
 *         If an error occurs in the Access Subsystem.
 */
// Streams the result of invoking service method (sDefPID, methodName) with
// userParms on object PID back to the client, mirroring upstream headers.
public void getDissemination(Context context, String PID, String sDefPID,
        String methodName, Property[] userParms, Date asOfDateTime,
        HttpServletResponse response, HttpServletRequest request)
        throws IOException, ServerException {
    ServletOutputStream out = null;
    MIMETypedStream dissemination = null;
    dissemination = m_access.getDissemination(context, PID, sDefPID,
            methodName, userParms, asOfDateTime);
    // NOTE(review): unlike getDatastreamDissemination, the output stream is
    // acquired before any status / content-type decision is made.
    out = response.getOutputStream();
    try {
        // testing to see what's in request header that might be of interest
        if (logger.isDebugEnabled()) {
            for (Enumeration<?> e = request.getHeaderNames(); e
                    .hasMoreElements();) {
                String name = (String) e.nextElement();
                Enumeration<?> headerValues = request.getHeaders(name);
                StringBuffer sb = new StringBuffer();
                while (headerValues.hasMoreElements()) {
                    sb.append((String) headerValues.nextElement());
                }
                String value = sb.toString();
                logger.debug("FEDORASERVLET REQUEST HEADER CONTAINED: {} : {}",
                        name, value);
            }
        }
        // Dissemination was successful;
        // Return MIMETypedStream back to browser client
        if (dissemination.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
            // Forward an upstream redirect to the client.
            String location = "";
            for (Property prop : dissemination.header) {
                if (prop.name.equalsIgnoreCase(HttpHeaders.LOCATION)) {
                    location = prop.value;
                    break;
                }
            }
            response.sendRedirect(location);
        } else {
            response.setContentType(dissemination.getMIMEType());
            // Mirror upstream headers except transfer-encoding/content-type.
            Property[] headerArray = dissemination.header;
            if (headerArray != null) {
                for (int i = 0; i < headerArray.length; i++) {
                    if (headerArray[i].name != null
                            && !headerArray[i].name
                                    .equalsIgnoreCase("transfer-encoding")
                            && !headerArray[i].name
                                    .equalsIgnoreCase("content-type")) {
                        response.addHeader(headerArray[i].name,
                                headerArray[i].value);
                        logger.debug(
                                "THIS WAS ADDED TO FEDORASERVLET RESPONSE HEADER FROM ORIGINATING PROVIDER {} : {}",
                                headerArray[i].name, headerArray[i].value);
                    }
                }
            }
            int byteStream = 0;
            logger.debug("Started reading dissemination stream");
            InputStream dissemResult = dissemination.getStream();
            byte[] buffer = new byte[BUF];
            while ((byteStream = dissemResult.read(buffer)) != -1) {
                out.write(buffer, 0, byteStream);
            }
            buffer = null;
            dissemResult.close();
            dissemResult = null;
            out.flush();
            out.close();
            logger.debug("Finished reading dissemination stream");
        }
    } finally {
        dissemination.close();
    }
}

/**
 * <p>
 * A Thread to serialize an ObjectProfile object into XML.
 * </p>
 */
public class ProfileSerializerThread extends Thread {

    // Writing end of the pipe; the servlet thread reads the paired PipedReader.
    private PipedWriter pw = null;

    private String PID = null;

    private ObjectProfile objProfile = null;

    private Date versDateTime = null;

    /**
     * <p>
     * Constructor for ProfileSerializeThread.
     * </p>
     *
     * @param PID
     *        The persistent identifier of the specified digital object.
     * @param objProfile
     *        An object profile data structure.
     * @param versDateTime
     *        The version datetime stamp of the request.
     * @param pw
     *        A PipedWriter to which the serialization info is written.
     */
    public ProfileSerializerThread(Context context, String PID,
            ObjectProfile objProfile, Date versDateTime, PipedWriter pw) {
        // NOTE(review): the context parameter is accepted but never stored or
        // read by this class.
        this.pw = pw;
        this.PID = PID;
        this.objProfile = objProfile;
        this.versDateTime = versDateTime;
    }

    /**
     * <p>
     * This method executes the thread.
     * </p>
     */
    @Override
    public void run() {
        // Serialize the object profile as XML into the piped writer; the
        // servlet thread consumes the paired PipedReader concurrently.
        if (pw != null) {
            try {
                pw.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
                pw.write("<objectProfile pid=\"");
                StreamUtility.enc(PID, pw);
                pw.write('"');
                if (versDateTime != null) {
                    // NOTE(review): this first convertDateToString result is
                    // discarded; the call is repeated on the next line.
                    DateUtility.convertDateToString(versDateTime);
                    pw.write(" dateTime=\""
                            + DateUtility.convertDateToString(versDateTime)
                            + "\"");
                }
                pw.write(" xmlns=\"" + OBJ_PROFILE1_0.namespace.uri + "\"");
                pw.write(" xmlns:xsi=\"" + XSI.uri + "\""
                        + " xsi:schemaLocation=\""
                        + OBJ_PROFILE1_0.namespace.uri + " "
                        + OBJ_PROFILE1_0.xsdLocation + "\">");
                // PROFILE FIELDS SERIALIZATION
                pw.write("<objLabel>");
                StreamUtility.enc(objProfile.objectLabel, pw);
                pw.write("</objLabel>");
                pw.write("<objOwnerId>");
                StreamUtility.enc(objProfile.objectOwnerId, pw);
                pw.write("</objOwnerId>");
                pw.write("<objModels>\n");
                for (String model : objProfile.objectModels) {
                    // NOTE(review): model names are written unescaped here,
                    // unlike the StreamUtility.enc'd fields above.
                    pw.write("<model>" + model + "</model>\n");
                }
                pw.write("</objModels>");
                String cDate = DateUtility
                        .convertDateToString(objProfile.objectCreateDate);
                pw.write("<objCreateDate>" + cDate + "</objCreateDate>");
                String mDate = DateUtility
                        .convertDateToString(objProfile.objectLastModDate);
                pw.write("<objLastModDate>" + mDate + "</objLastModDate>");; // NOTE(review): stray empty statement
                pw.write("<objDissIndexViewURL>");
                StreamUtility.enc(objProfile.dissIndexViewURL, pw);
                pw.write("</objDissIndexViewURL>");
                pw.write("<objItemIndexViewURL>");
                StreamUtility.enc(objProfile.itemIndexViewURL, pw);
                pw.write("</objItemIndexViewURL>");
                pw.write("</objectProfile>");
                pw.flush();
                pw.close();
            } catch (IOException ioe) {
                logger.error("WriteThread IOException", ioe);
            } finally {
                // Guarantees the pipe is closed even when the try block failed
                // before reaching pw.close(); the double close is harmless.
                try {
                    if (pw != null) {
                        pw.close();
                    }
                } catch (IOException ioe) {
                    logger.error("WriteThread IOException", ioe);
                }
            }
        }
    }
}

/**
 * <p>
 * For now, treat a HTTP POST request just like a GET request.
 * </p>
 *
 * @param request
 *        The servlet request.
 * @param response
 *        The servlet response.
 * @throws ServletException
 *         If thrown by <code>doGet</code>.
 * @throws IOException
 *         If thrown by <code>doGet</code>.
 */
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    doGet(request, response);
}
}
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc

// NOTE(review): everything below is raw Jad decompiler output of an
// obfuscated (single-letter identifier) class. The `u/aly/di`-style tokens in
// the static initializers are Jad's broken rendering of class literals
// (u.aly.di.class etc.) and DO NOT compile; likewise Iterator/EnumSet are
// used without imports, and `private class e extends Enum` is Jad's rendering
// of a nested enum. Kept verbatim for reference only — do not edit by hand;
// regenerate from the original bytecode instead.

package u.aly;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

// Referenced classes of package u.aly:
//            bz, dd, ct, di,
//            aH, dj, aJ, cl,
//            cm, cf, cs, dk,
//            cy, dh, dg, bw,
//            cz, cg

// Thrift-style struct named "IdSnapshot": fields identity (a), ts (b),
// version (c); l is the isset bitfield for the two primitive fields.
public class ar implements Serializable, Cloneable, bz {

    public static final Map d;                       // field-id -> metadata map (built in static init)
    private static final dd e = new dd("IdSnapshot"); // struct descriptor
    private static final ct f = new ct("identity", (byte)11, (short)1); // field: string, id 1
    private static final ct g = new ct("ts", (byte)10, (short)2);       // field: i64, id 2
    private static final ct h = new ct("version", (byte)8, (short)3);   // field: i32, id 3
    private static final Map i;                      // scheme -> factory map
    private static final int j = 0;                  // bit index of "ts is set"
    private static final int k = 1;                  // bit index of "version is set"
    public String a;                                 // identity
    public long b;                                   // ts
    public int c;                                    // version
    private byte l;                                  // isset bitfield

    public ar() {
        l = 0;
    }

    public ar(String s, long l1, int i1) {
        this();
        a = s;
        b = l1;
        b(true);   // mark ts set
        c = i1;
        c(true);   // mark version set
    }

    // Copy constructor.
    public ar(ar ar1) {
        l = 0;
        l = ar1.l;
        if (ar1.e()) {
            a = ar1.a;
        }
        b = ar1.b;
        c = ar1.c;
    }

    // Java-serialization hook: delegates to the Thrift-style reader.
    private void a(ObjectInputStream objectinputstream) {
        try {
            l = 0;
            a(((cy) (new cs(new dk(objectinputstream)))));
            return;
        } catch (cf cf1) {
            throw new IOException(cf1.getMessage());
        }
    }

    // Java-serialization hook: delegates to the Thrift-style writer.
    private void a(ObjectOutputStream objectoutputstream) {
        try {
            b(new cs(new dk(objectoutputstream)));
            return;
        } catch (cf cf1) {
            throw new IOException(cf1.getMessage());
        }
    }

    static dd n() {
        return e;
    }

    static ct o() {
        return f;
    }

    static ct p() {
        return g;
    }

    static ct q() {
        return h;
    }

    // deepCopy()
    public ar a() {
        return new ar(this);
    }

    // fluent setter: version
    public ar a(int i1) {
        c = i1;
        c(true);
        return this;
    }

    // fluent setter: ts
    public ar a(long l1) {
        b = l1;
        b(true);
        return this;
    }

    // fluent setter: identity
    public ar a(String s) {
        a = s;
        return this;
    }

    // read(protocol)
    public void a(cy cy1) {
        ((dh)i.get(cy1.D())).b().a(cy1, this);
    }

    public void a(boolean flag) {
        if (!flag) {
            a = null;
        }
    }

    public cg b(int i1) {
        return c(i1);
    }

    // clear()
    public void b() {
        a = null;
        b(false);
        b = 0L;
        c(false);
        c = 0;
    }

    // write(protocol)
    public void b(cy cy1) {
        ((dh)i.get(cy1.D())).b().b(cy1, this);
    }

    // set/unset "ts is set" bit
    public void b(boolean flag) {
        l = bw.a(l, 0, flag);
    }

    public String c() {
        return a;
    }

    public e c(int i1) {
        return e.a(i1);
    }

    // set/unset "version is set" bit
    public void c(boolean flag) {
        l = bw.a(l, 1, flag);
    }

    public void d() {
        a = null;
    }

    // identity is set?
    public boolean e() {
        return a != null;
    }

    public long f() {
        return b;
    }

    public bz g() {
        return a();
    }

    public void h() {
        l = bw.b(l, 0);
    }

    // ts is set?
    public boolean i() {
        return bw.a(l, 0);
    }

    public int j() {
        return c;
    }

    public void k() {
        l = bw.b(l, 1);
    }

    // version is set?
    public boolean l() {
        return bw.a(l, 1);
    }

    // validate(): identity is a required field.
    public void m() {
        if (a == null) {
            throw new cz((new StringBuilder("Required field 'identity' was not present! Struct: ")).append(toString()).toString());
        } else {
            return;
        }
    }

    public String toString() {
        StringBuilder stringbuilder = new StringBuilder("IdSnapshot(");
        stringbuilder.append("identity:");
        if (a == null) {
            stringbuilder.append("null");
        } else {
            stringbuilder.append(a);
        }
        stringbuilder.append(", ");
        stringbuilder.append("ts:");
        stringbuilder.append(b);
        stringbuilder.append(", ");
        stringbuilder.append("version:");
        stringbuilder.append(c);
        stringbuilder.append(")");
        return stringbuilder.toString();
    }

    static {
        // NOTE(review): `u/aly/di` etc. are broken class literals from the
        // decompiler (u.aly.di.class, u.aly.ar$e.class, u.aly.ar.class).
        i = new HashMap();
        i.put(u/aly/di, new aH(null));
        i.put(u/aly/dj, new aJ(null));
        EnumMap enummap = new EnumMap(u/aly/ar$e);
        enummap.put(e.a, new cl("identity", (byte)1, new cm((byte)11)));
        enummap.put(e.b, new cl("ts", (byte)1, new cm((byte)10)));
        enummap.put(e.c, new cl("version", (byte)1, new cm((byte)8)));
        d = Collections.unmodifiableMap(enummap);
        cl.a(u/aly/ar, d);
    }

    // Decompiled nested enum of field identifiers (IDENTITY, TS, VERSION).
    private class e extends Enum implements cg {

        public static final e a;   // IDENTITY
        public static final e b;   // TS
        public static final e c;   // VERSION
        private static final Map d; // name -> constant lookup
        private static final e g[];
        private final short e;     // Thrift field id
        private final String f;    // field name

        // findByThriftId
        public static e a(int i1) {
            switch (i1) {
            default:
                return null;

            case 1: // '\001'
                return a;

            case 2: // '\002'
                return b;

            case 3: // '\003'
                return c;
            }
        }

        // findByName
        public static e a(String s) {
            return (e)d.get(s);
        }

        // findByThriftIdOrThrow
        public static e b(int i1) {
            e e1 = a(i1);
            if (e1 == null) {
                throw new IllegalArgumentException((new StringBuilder("Field ")).append(i1).append(" doesn't exist!").toString());
            } else {
                return e1;
            }
        }

        public static e valueOf(String s) {
            return (e)Enum.valueOf(u/aly/ar$e, s);
        }

        public static e[] values() {
            e ae[] = g;
            int i1 = ae.length;
            e ae1[] = new e[i1];
            System.arraycopy(ae, 0, ae1, 0, i1);
            return ae1;
        }

        public short a() {
            return e;
        }

        public String b() {
            return f;
        }

        static {
            a = new e("IDENTITY", 0, (short)1, "identity");
            b = new e("TS", 1, (short)2, "ts");
            c = new e("VERSION", 2, (short)3, "version");
            e ae[] = new e[3];
            ae[0] = a;
            ae[1] = b;
            ae[2] = c;
            g = ae;
            d = new HashMap();
            // NOTE(review): Iterator and EnumSet are not imported above —
            // another decompilation artifact.
            Iterator iterator = EnumSet.allOf(u/aly/ar$e).iterator();
            do {
                if (!iterator.hasNext()) {
                    return;
                }
                e e1 = (e)iterator.next();
                d.put(e1.b(), e1);
            } while (true);
        }

        private e(String s, int i1, short word0, String s1) {
            super(s, i1);
            e = word0;
            f = s1;
        }
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.firefox; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableMap; import static org.openqa.selenium.firefox.FirefoxDriver.Capability.BINARY; import static org.openqa.selenium.firefox.FirefoxDriver.Capability.MARIONETTE; import static org.openqa.selenium.firefox.FirefoxDriver.Capability.PROFILE; import com.google.common.collect.ImmutableSortedMap; import org.openqa.selenium.internal.Require; import org.openqa.selenium.remote.AbstractDriverOptions; import org.openqa.selenium.Capabilities; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.remote.BrowserType; import org.openqa.selenium.remote.CapabilityType; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; /** * Manage firefox specific settings in a way that geckodriver can understand. 
 * <p>
 * An example of usage:
 * <pre>
 *    FirefoxOptions options = new FirefoxOptions()
 *      .addPreference("browser.startup.page", 1)
 *      .addPreference("browser.startup.homepage", "https://www.google.co.uk");
 *    WebDriver driver = new FirefoxDriver(options);
 * </pre>
 */
public class FirefoxOptions extends AbstractDriverOptions<FirefoxOptions> {

  public static final String FIREFOX_OPTIONS = "moz:firefoxOptions";

  // Firefox command-line arguments (e.g. "-headless").
  private List<String> args = new ArrayList<>();
  // about:config preferences requested for the session.
  private Map<String, Object> preferences = new HashMap<>();
  private FirefoxDriverLogLevel logLevel;
  // Mirrors the BINARY capability; kept in sync by setCapability below.
  private Binary binary;
  // true when the legacy (non-Marionette) driver was requested.
  private boolean legacy;
  private FirefoxProfile profile;

  public FirefoxOptions() {
    // Read system properties and use those if they are set, allowing users to override them later
    // should they want to.
    String binary = System.getProperty(FirefoxDriver.SystemProperty.BROWSER_BINARY);
    if (binary != null) {
      setBinary(binary);
    }

    String profileName = System.getProperty(FirefoxDriver.SystemProperty.BROWSER_PROFILE);
    if (profileName != null) {
      FirefoxProfile profile = new ProfilesIni().getProfile(profileName);
      if (profile == null) {
        throw new WebDriverException(String.format(
            "Firefox profile '%s' named in system property '%s' not found",
            profileName, FirefoxDriver.SystemProperty.BROWSER_PROFILE));
      }
      setProfile(profile);
    }

    String forceMarionette = System.getProperty(FirefoxDriver.SystemProperty.DRIVER_USE_MARIONETTE);
    if (forceMarionette != null) {
      setLegacy(!Boolean.getBoolean(FirefoxDriver.SystemProperty.DRIVER_USE_MARIONETTE));
    }

    setCapability(CapabilityType.BROWSER_NAME, BrowserType.FIREFOX);
    setAcceptInsecureCerts(true);
  }

  // Copy-style constructor: replays the source capabilities through
  // setCapability so the typed fields above get populated too.
  public FirefoxOptions(Capabilities source) {
    // We need to initialize all our own fields before calling.
    super();

    source.asMap().forEach((key, value)-> {
      if (value != null) {
        setCapability(key, value);
      }
    });

    // If `source` has options, we need to mirror those into this instance. This may be either a
    // Map (if we're constructing from a serialized instance) or another FirefoxOptions. *sigh*
    Object raw = source.getCapability(FIREFOX_OPTIONS);
    if (raw == null) {
      return;
    }

    if (raw instanceof FirefoxOptions) {
      FirefoxOptions that = (FirefoxOptions) raw;
      addArguments(that.args);
      that.preferences.forEach(this::addPreference);
      setLegacy(that.legacy);
      if (that.logLevel != null) {
        setLogLevel(that.logLevel);
      }
      if (that.binary != null) {
        setCapability(BINARY, that.binary.asCapability());
      }
      if (that.profile != null) {
        setProfile(that.profile);
      }
    } else if (raw instanceof Map) {
      // Deserialized form: pull each known key out of the nested map.
      Map<?, ?> that = (Map<?, ?>) raw;
      if (that.containsKey("args")) {
        Object value = that.get("args");
        if (value instanceof String) {
          addArguments((String) that.get("args"));
        } else if (value instanceof List<?>) {
          // unchecked: the serialized form is expected to carry strings here
          addArguments((List<String>) that.get("args"));
        } else {
          // last resort
          addArguments(that.get("args").toString());
        }
      }
      if (that.containsKey("prefs")) {
        Map<String, Object> prefs = (Map<String, Object>) that.get("prefs");
        preferences.putAll(prefs);
      }
      if (that.containsKey("binary")) {
        setBinary((String) that.get("binary"));
      }
      if (that.containsKey("log")) {
        Map<?, ?> logStruct = (Map<?, ?>) that.get("log");
        Object rawLevel = logStruct.get("level");
        if (rawLevel instanceof String) {
          setLogLevel(FirefoxDriverLogLevel.fromString((String) rawLevel));
        } else if (rawLevel instanceof FirefoxDriverLogLevel) {
          setLogLevel((FirefoxDriverLogLevel) rawLevel);
        }
      }
      if (that.containsKey("profile")) {
        Object value = that.get("profile");
        if (value instanceof String) {
          try {
            setProfile(FirefoxProfile.fromJson((String) value));
          } catch (IOException e) {
            throw new WebDriverException(e);
          }
        } else if (value instanceof FirefoxProfile) {
          setProfile((FirefoxProfile) value);
        } else {
          throw new WebDriverException(
              "In FirefoxOptions, don't know how to convert profile: " + that);
        }
      }
    }
  }

  // Requests the legacy (pre-Marionette) driver; stored via MARIONETTE.
  public FirefoxOptions setLegacy(boolean legacy) {
    setCapability(MARIONETTE, !legacy);
    return this;
  }

  public boolean isLegacy() {
    return legacy;
  }

  public FirefoxOptions setBinary(FirefoxBinary binary) {
    setCapability(BINARY,
binary); // completes setBinary(FirefoxBinary) begun on the previous line
    return this;
  }

  public FirefoxOptions setBinary(Path path) {
    setCapability(BINARY, path);
    return this;
  }

  public FirefoxOptions setBinary(String path) {
    setCapability(BINARY, path);
    return this;
  }

  /**
   * Constructs a {@link FirefoxBinary} and returns that to be used, and because of this is only
   * useful when actually starting firefox.
   */
  public FirefoxBinary getBinary() {
    return getBinaryOrNull().orElseGet(FirefoxBinary::new);
  }

  // NOTE(review): despite the name, this returns an Optional, never null.
  public Optional<FirefoxBinary> getBinaryOrNull() {
    return Optional.ofNullable(binary).map(Binary::asBinary);
  }

  public FirefoxOptions setProfile(FirefoxProfile profile) {
    setCapability(FirefoxDriver.Capability.PROFILE, profile);
    return this;
  }

  public FirefoxProfile getProfile() {
    return profile;
  }

  public FirefoxOptions addArguments(String... arguments) {
    addArguments(Arrays.asList(arguments));
    return this;
  }

  public FirefoxOptions addArguments(List<String> arguments) {
    args.addAll(arguments);
    return this;
  }

  public FirefoxOptions addPreference(String key, Object value) {
    preferences.put(Require.nonNull("Key", key), value);
    return this;
  }

  public FirefoxOptions setLogLevel(FirefoxDriverLogLevel logLevel) {
    this.logLevel = Require.nonNull("Log level", logLevel);
    return this;
  }

  // Toggles the "-headless" argument; calling with false removes it again.
  public FirefoxOptions setHeadless(boolean headless) {
    args.remove("-headless");
    if (headless) {
      args.add("-headless");
    }
    return this;
  }

  // Intercepts the Firefox-specific capabilities so the typed fields stay in
  // sync with whatever gets stored in the underlying capability map.
  @Override
  public void setCapability(String key, Object value) {
    switch (key) {
      case BINARY:
        binary = new Binary(Require.nonNull("Binary value", value));
        value = binary.asCapability();
        break;

      case MARIONETTE:
        if (value instanceof Boolean) {
          legacy = !(Boolean) value;
        }
        break;

      case PROFILE:
        if (value instanceof FirefoxProfile) {
          profile = (FirefoxProfile) value;
        } else if (value instanceof String) {
          try {
            profile = FirefoxProfile.fromJson((String) value);
          } catch (IOException e) {
            throw new WebDriverException(e);
          }
          value = profile;
        } else {
          throw new WebDriverException("Unexpected value for profile: " + value);
        }
        break;

      default:
        // Do nothing
    }

    super.setCapability(key, value);
  }

  // Builds the wire representation, nesting the Firefox-specific pieces
  // under the "moz:firefoxOptions" key.
  @Override
  public Map<String, Object> asMap() {
    Map<String, Object> toReturn = new HashMap<>(super.asMap());

    ImmutableSortedMap.Builder<String, Object> w3cOptions = ImmutableSortedMap.naturalOrder();
    w3cOptions.put("args", unmodifiableList(new ArrayList<>(args)));

    if (binary != null) {
      w3cOptions.put("binary", binary.asPath());
    }

    if (logLevel != null) {
      w3cOptions.put("log", singletonMap("level", logLevel));
    }

    if (profile != null) {
      // When a profile is present, preferences are folded into it instead of
      // being emitted under "prefs".
      preferences.forEach(profile::setPreference);
      try {
        w3cOptions.put("profile", profile.toJson());
      } catch (IOException e) {
        throw new WebDriverException(e);
      }
    } else {
      w3cOptions.put("prefs", unmodifiableMap(new HashMap<>(preferences)));
    }

    toReturn.put(FIREFOX_OPTIONS, w3cOptions.build());

    return unmodifiableMap(toReturn);
  }

  @Override
  public FirefoxOptions merge(Capabilities capabilities) {
    super.merge(capabilities);
    return this;
  }

  @Override
  protected int amendHashCode() {
    return Objects.hash(
        args,
        preferences,
        logLevel,
        binary,
        legacy,
        profile);
  }

  // Normalises the three accepted binary representations
  // (FirefoxBinary, Path, String) behind one value object.
  private class Binary {

    private String path;
    private FirefoxBinary binary;

    public Binary(Object value) {
      if (value instanceof FirefoxBinary) {
        this.binary = (FirefoxBinary) value;
        // Non-static inner class on purpose: lets the binary amend the
        // enclosing options instance.
        binary.amendOptions(FirefoxOptions.this);
        return;
      }

      if (value instanceof Path || value instanceof String) {
        // Normalise Windows separators for the wire format.
        this.path = value.toString().replace('\\', '/');
        return;
      }

      throw new IllegalArgumentException("Unrecognised type for binary: " + value);
    }

    FirefoxBinary asBinary() {
      return binary == null ? new FirefoxBinary(new File(path)) : binary;
    }

    Object asCapability() {
      return binary == null ? path : binary;
    }

    String asPath() {
      return binary == null ? path : binary.getPath();
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof Binary)) {
        return false;
      }
      Binary that = (Binary) o;
      return Objects.equals(this.path, that.path)
          && Objects.equals(this.binary, that.binary);
    }

    @Override
    public int hashCode() {
      return Objects.hash(path, binary);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.tx; import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.logging.log4j.Logger; import org.apache.geode.CancelException; import org.apache.geode.DataSerializer; import org.apache.geode.cache.TransactionDataNodeHasDepartedException; import org.apache.geode.cache.TransactionException; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.DistributionMessage; import org.apache.geode.distributed.internal.DistributionStats; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.ReplyMessage; import org.apache.geode.distributed.internal.ReplyProcessor21; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import 
org.apache.geode.internal.Assert; import org.apache.geode.internal.HeapDataOutputStream; import org.apache.geode.internal.cache.InitialImageOperation; import org.apache.geode.internal.cache.LocalRegion; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.internal.cache.RemoteOperationException; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.log4j.LogMarker; import org.apache.geode.internal.util.ObjectIntProcedure; public class RemoteFetchKeysMessage extends RemoteOperationMessage { private static final Logger logger = LogService.getLogger(); public RemoteFetchKeysMessage() {} private RemoteFetchKeysMessage(InternalDistributedMember recipient, String regionPath, ReplyProcessor21 processor) { super(recipient, regionPath, processor); } @Override protected boolean operateOnRegion(ClusterDistributionManager dm, LocalRegion r, long startTime) throws RemoteOperationException { if (!(r instanceof PartitionedRegion)) { // prs already wait on initialization r.waitOnInitialization(); // bug #43371 - accessing a region before it's initialized } Set<?> keys = r.keySet(); try { RemoteFetchKeysReplyMessage.send(getSender(), processorId, dm, keys); } catch (IOException io) { if (logger.isDebugEnabled()) { logger.debug("Caught exception while sending keys: {}", io.getMessage(), io); throw new RemoteOperationException( LocalizedStrings.FetchKeysMessage_UNABLE_TO_SEND_RESPONSE_TO_FETCH_KEYS_REQUEST .toLocalizedString(), io); } } return false; } public int getDSFID() { return R_FETCH_KEYS_MESSAGE; } /** * @return the response */ public static FetchKeysResponse send(LocalRegion currRegion, DistributedMember target) { FetchKeysResponse response = new FetchKeysResponse(currRegion.getSystem(), (InternalDistributedMember) target); RemoteFetchKeysMessage msg = new RemoteFetchKeysMessage((InternalDistributedMember) target, currRegion.getFullPath(), response); 
    // (continuation of send(LocalRegion, DistributedMember))
    currRegion.getSystem().getDistributionManager().putOutgoing(msg);
    return response;
  }

  @Override
  public void toData(DataOutput out) throws IOException {
    super.toData(out);
  }

  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    super.fromData(in);
  }

  /** Reply message carrying one chunk of serialized region keys. */
  public static class RemoteFetchKeysReplyMessage extends ReplyMessage {

    /** The number of the series */
    int seriesNum;
    /** The message number in the series */
    int msgNum;
    /** The total number of series */
    int numSeries;
    /** Whether this is the last of a series */
    boolean lastInSeries;
    /** the stream holding the chunk to send */
    transient HeapDataOutputStream chunkStream;
    /** the array holding data received */
    transient byte[] chunk;

    /**
     * Empty constructor to conform to DataSerializable interface
     */
    public RemoteFetchKeysReplyMessage() {}

    private RemoteFetchKeysReplyMessage(InternalDistributedMember recipient, int processorId,
        HeapDataOutputStream chunk, int seriesNum, int msgNum, int numSeries,
        boolean lastInSeries) {
      super();
      setRecipient(recipient);
      setProcessorId(processorId);
      this.seriesNum = seriesNum;
      this.msgNum = msgNum;
      this.numSeries = numSeries;
      this.lastInSeries = lastInSeries;
      this.chunkStream = chunk;
    }

    /**
     * Chunks the given key set and sends each chunk to the recipient as a
     * separate reply message.
     *
     * @throws IOException if the peer is no longer available
     */
    public static void send(final InternalDistributedMember recipient, final int processorId,
        final DistributionManager dm, Set<?> keys) throws IOException {
      Assert.assertTrue(recipient != null, "FetchKeysReplyMessage NULL reply message");
      final int numSeries = 1;
      final int seriesNum = 0;

      // chunkEntries returns false if didn't finish
      if (logger.isDebugEnabled()) {
        logger.debug("Starting region keys chunking for {} keys to member {}", keys.size(),
            recipient);
      }
      boolean finished = chunkSet(recipient, keys, InitialImageOperation.CHUNK_SIZE_IN_BYTES, false,
          new ObjectIntProcedure() {
            int msgNum = 0;

            boolean last = false;

            /**
             * @param a byte[] chunk
             * @param b positive if last chunk
             * @return true to continue to next chunk
             */
            public boolean executeWith(Object a, int b) {
              HeapDataOutputStream chunk = (HeapDataOutputStream) a;
              this.last = b > 0;
              try {
                boolean okay = sendChunk(recipient, processorId, dm, chunk, seriesNum, msgNum++,
                    numSeries, this.last);
                return okay;
              } catch (CancelException e) {
                // shutting down: abort the remaining chunks
                return false;
              }
            }
          });
      if (logger.isDebugEnabled()) {
        logger.debug("{} region keys chunking", (finished ? "Finished" : "DID NOT complete"));
      }
    }

    /** Sends one chunk; true when no recipient reported a delivery failure. */
    static boolean sendChunk(InternalDistributedMember recipient, int processorId,
        DistributionManager dm, HeapDataOutputStream chunk, int seriesNum, int msgNum,
        int numSeries, boolean lastInSeries) {
      RemoteFetchKeysReplyMessage reply = new RemoteFetchKeysReplyMessage(recipient, processorId,
          chunk, seriesNum, msgNum, numSeries, lastInSeries);
      Set<?> failures = dm.putOutgoing(reply);
      return (failures == null) || (failures.size() == 0);
    }

    /**
     * Serialize the given set's elements into byte[] chunks, calling proc for each one. proc args:
     * the byte[] chunk and an int indicating whether it is the last chunk (positive means last
     * chunk, zero otherwise). The return value of proc indicates whether to continue to the next
     * chunk (true) or abort (false).
     *
     * @return true if finished all chunks, false if stopped early
     */
    static boolean chunkSet(InternalDistributedMember recipient, Set<?> set,
        int CHUNK_SIZE_IN_BYTES, boolean includeValues, ObjectIntProcedure proc)
        throws IOException {
      // NOTE(review): the CHUNK_SIZE_IN_BYTES and includeValues parameters are
      // never read in this body; the constant
      // InitialImageOperation.CHUNK_SIZE_IN_BYTES is used directly below.
      @SuppressWarnings("rawtypes")
      Iterator it = set.iterator();

      boolean keepGoing = true;
      boolean sentLastChunk = false;

      // always write at least one chunk
      final HeapDataOutputStream mos = new HeapDataOutputStream(
          InitialImageOperation.CHUNK_SIZE_IN_BYTES + 2048, recipient.getVersionObject());
      do {
        mos.reset();

        int avgItemSize = 0;
        int itemCount = 0;

        while ((mos.size() + avgItemSize) < InitialImageOperation.CHUNK_SIZE_IN_BYTES
            && it.hasNext()) {
          Object key = it.next();
          DataSerializer.writeObject(key, mos);

          // Note we track the itemCount so we can compute avgItemSize
          itemCount++;
          // Note we track avgItemSize to help us not to always go one item
          // past the max chunk size. When we go past it causes us to grow
          // the ByteBuffer that the chunk is stored in resulting in a copy
          // of the data.
          avgItemSize = mos.size() / itemCount;

        }

        // Write "end of chunk" entry to indicate end of chunk
        DataSerializer.writeObject((Object) null, mos);

        // send 1 for last message if no more data
        int lastMsg = it.hasNext() ? 0 : 1;
        keepGoing = proc.executeWith(mos, lastMsg);
        sentLastChunk = lastMsg == 1 && keepGoing;
        // if this region is destroyed while we are sending data, then abort.
      } while (keepGoing && it.hasNext());

      // return false if we were told to abort
      return sentLastChunk;
    }

    /**
     * Processes this message. This method is invoked by the receiver of the message.
     *
     * @param dm the distribution manager that is processing the message.
     */
    @Override
    public void process(final DistributionManager dm, final ReplyProcessor21 p) {
      final long startTime = getTimestamp();
      FetchKeysResponse processor = (FetchKeysResponse) p;
      if (processor == null) {
        // The waiting processor is gone (e.g. timed out); drop the chunk.
        if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) {
          logger.trace(LogMarker.DM_VERBOSE, "FetchKeysReplyMessage processor not found");
        }
        return;
      }
      processor.process(this);

      if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) {
        logger.trace(LogMarker.DM_VERBOSE, "{} Remote-processed {}", processor, this);
      }
      dm.getStats().incReplyMessageTime(DistributionStats.getStatTime() - startTime);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(this.seriesNum);
      out.writeInt(this.msgNum);
      out.writeInt(this.numSeries);
      out.writeBoolean(this.lastInSeries);
      // The sender serializes the outgoing chunkStream; the receiver reads it
      // back into the chunk byte array in fromData.
      DataSerializer.writeObjectAsByteArray(this.chunkStream, out);
    }

    @Override
    public int getDSFID() {
      return R_FETCH_KEYS_REPLY;
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.seriesNum = in.readInt();
      this.msgNum = in.readInt();
      this.numSeries = in.readInt();
      this.lastInSeries = in.readBoolean();
      this.chunk = DataSerializer.readByteArray(in);
    }

    @Override
    public String toString() {
      StringBuffer sb = new StringBuffer();
      sb.append("RemoteFetchKeysReplyMessage ").append("processorid=").append(this.processorId);
      if (getSender() != null) {
        sb.append(",sender=").append(this.getSender());
      }
      sb.append(",seriesNum=").append(seriesNum).append(",msgNum=").append(msgNum)
          .append(",numSeries=").append(numSeries).append(",lastInSeries=").append(lastInSeries);
      if (chunkStream != null) {
        sb.append(",size=").append(chunkStream.size());
      } else if (chunk != null) {
        sb.append(",size=").append(chunk.length);
      }
      if (getException() != null) {
        sb.append(", exception=").append(getException());
      }
      return sb.toString();
    }
  }

  /** Reply processor that accumulates chunked key replies into one set. */
  public static class FetchKeysResponse extends ReplyProcessor21 {

    private final Set<Object> returnValue;

    /** lock used to synchronize chunk processing */
    private final Object endLock = new Object();

    /** number of chunks processed */
    private volatile int chunksProcessed;

    /** chunks expected (set when last chunk has been processed) */
    private volatile int chunksExpected;

    /** whether the last chunk has been processed */
    private volatile boolean lastChunkReceived;

    public FetchKeysResponse(InternalDistributedSystem system, InternalDistributedMember member) {
      super(system, member);
      returnValue = new HashSet<>();
    }

    @Override
    public void process(DistributionMessage msg) {
      // Only count the reply as "done" once the last expected chunk has been
      // folded in; super.process releases the waiting thread.
      boolean doneProcessing = false;
      try {
        if (msg instanceof RemoteFetchKeysReplyMessage) {
          RemoteFetchKeysReplyMessage fkrm = (RemoteFetchKeysReplyMessage) msg;
          if (fkrm.getException() != null) {
            doneProcessing = true;
          } else {
            doneProcessing = processChunk((RemoteFetchKeysReplyMessage) msg);
          }
        } else {
          doneProcessing = true;
        }
      } finally {
        if (doneProcessing) {
          super.process(msg);
        }
      }
    }

    /**
     * Deserializes one chunk of keys into returnValue and updates the
     * chunk-accounting state.
     *
     * @return true if done processing
     */
    boolean processChunk(RemoteFetchKeysReplyMessage msg) {
      // this processing algorithm won't work well if there are multiple recipients. currently the
      // retry logic for failed recipients is in PartitionedRegion. If we parallelize the sending
      // of this message, we'll need to handle fail over in this processor class and track results
      // differently.
      boolean doneProcessing = false;
      try {
        ByteArrayInputStream byteStream = new ByteArrayInputStream(msg.chunk);
        DataInputStream in = new DataInputStream(byteStream);
        while (in.available() > 0) {
          Object key = DataSerializer.readObject(in);
          if (key != null) {
            synchronized (returnValue) {
              returnValue.add(key);
            }
          } else {
            // null should signal the end of the set of keys
            Assert.assertTrue(in.available() == 0);
          }
        }

        synchronized (this.endLock) {
          chunksProcessed = chunksProcessed + 1;

          if (((msg.seriesNum + 1) == msg.numSeries) && msg.lastInSeries) {
            // Last chunk seen: now we know how many chunks to expect in total
            // (chunks may arrive out of order).
            lastChunkReceived = true;
            chunksExpected = msg.msgNum + 1;
          }

          if (lastChunkReceived && (chunksExpected == chunksProcessed)) {
            doneProcessing = true;
          }
          if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) {
            logger.trace(LogMarker.DM_VERBOSE,
                "{} chunksProcessed={},lastChunkReceived={},chunksExpected={},done={}", this,
                chunksProcessed, lastChunkReceived, chunksExpected, doneProcessing);
          }
        }
      } catch (Exception e) {
        processException(new ReplyException(
            LocalizedStrings.FetchKeysMessage_ERROR_DESERIALIZING_KEYS.toLocalizedString(), e));
      }
      return doneProcessing;
    }

    /**
     * Blocks until all chunks have arrived.
     *
     * @return an unmodifiable view of the collected keys
     */
    @SuppressWarnings("rawtypes")
    public Set waitForKeys() {
      try {
        waitForRepliesUninterruptibly();
      } catch (ReplyException e) {
        if (e.getCause() instanceof RemoteOperationException) {
          if (e.getCause().getCause() instanceof CancelException) {
            throw new TransactionDataNodeHasDepartedException("Node departed while fetching keys");
          }
        }
        e.handleCause();
        if (!this.lastChunkReceived) {
          throw new TransactionException(e);
        }
      }
      return Collections.unmodifiableSet(this.returnValue);
    }
  }
}
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.siddhi.core.query.function; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.wso2.siddhi.core.ExecutionPlanRuntime; import org.wso2.siddhi.core.SiddhiManager; import org.wso2.siddhi.core.event.Event; import org.wso2.siddhi.core.query.output.callback.QueryCallback; import org.wso2.siddhi.core.stream.input.InputHandler; import org.wso2.siddhi.core.util.EventPrinter; import org.wso2.siddhi.query.api.exception.ExecutionPlanValidationException; public class MaximumFunctionExtensionTestCase { private static final Logger log = Logger.getLogger(MaximumFunctionExtensionTestCase.class); private volatile int count; private volatile boolean eventArrived; @Before public void init() { count = 0; eventArrived = false; } @Test public void testMaxFunctionExtension1() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 1"); SiddhiManager siddhiManager = new SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 double,price2 double, price3 double);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); 
executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event event : inEvents) { count++; switch (count) { case 1: Assert.assertEquals(36.75, event.getData(0)); break; case 2: Assert.assertEquals(38.12, event.getData(0)); break; case 3: Assert.assertEquals(39.25, event.getData(0)); break; case 4: Assert.assertEquals(37.75, event.getData(0)); break; case 5: Assert.assertEquals(38.12, event.getData(0)); break; case 6: Assert.assertEquals(40.0, event.getData(0)); break; default: org.junit.Assert.fail(); } } } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{36, 36.75, 35.75}); inputHandler.send(new Object[]{37.88, 38.12, 37.62}); inputHandler.send(new Object[]{39.00, 39.25, 38.62}); inputHandler.send(new Object[]{36.88, 37.75, 36.75}); inputHandler.send(new Object[]{38.12, 38.12, 37.75}); inputHandler.send(new Object[]{38.12, 40, 37.75}); Thread.sleep(300); Assert.assertEquals(6, count); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test(expected = ExecutionPlanValidationException.class) public void testMaxFunctionExtension2() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 2"); SiddhiManager siddhiManager = new SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 int,price2 double, price3 double);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); } @Test public void testMaxFunctionExtension3() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 3"); SiddhiManager siddhiManager = new 
SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 int,price2 int, price3 int);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event event : inEvents) { count++; switch (count) { case 1: Assert.assertEquals(74, event.getData(0)); break; case 2: Assert.assertEquals(78, event.getData(0)); break; case 3: Assert.assertEquals(39, event.getData(0)); break; default: org.junit.Assert.fail(); } } } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{36, 38, 74}); inputHandler.send(new Object[]{78, 38, 37}); inputHandler.send(new Object[]{9, 39, 38}); Thread.sleep(300); Assert.assertEquals(3, count); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void testMaxFunctionExtension4() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 4"); SiddhiManager siddhiManager = new SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 float, price2 float, price3 float);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for 
(Event event : inEvents) { count++; switch (count) { case 1: Assert.assertEquals(36.75f, event.getData(0)); break; case 2: Assert.assertEquals(38.12f, event.getData(0)); break; case 3: Assert.assertEquals(39.25f, event.getData(0)); break; case 4: Assert.assertEquals(37.75f, event.getData(0)); break; case 5: Assert.assertEquals(38.12f, event.getData(0)); break; case 6: Assert.assertEquals(40.0f, event.getData(0)); break; default: org.junit.Assert.fail(); } } } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{36, 36.75, 35.75}); inputHandler.send(new Object[]{37.88, 38.12, 37.62}); inputHandler.send(new Object[]{39.00, 39.25, 38.62}); inputHandler.send(new Object[]{36.88, 37.75, 36.75}); inputHandler.send(new Object[]{38.12, 38.12, 37.75}); inputHandler.send(new Object[]{38.12, 40, 37.75}); Thread.sleep(300); Assert.assertEquals(6, count); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void testMaxFunctionExtension5() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 5"); SiddhiManager siddhiManager = new SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 long, price2 long, price3 long);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event event : inEvents) { count++; switch (count) { case 1: Assert.assertEquals(74L, event.getData(0)); break; case 2: Assert.assertEquals(78L, event.getData(0)); break; case 3: Assert.assertEquals(39L, event.getData(0)); 
break; default: org.junit.Assert.fail(); } } } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{36, 38, 74}); inputHandler.send(new Object[]{78, 38, 37}); inputHandler.send(new Object[]{9, 39, 38}); Thread.sleep(300); Assert.assertEquals(3, count); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } @Test public void testMaxFunctionExtension6() throws InterruptedException { log.info("MaximumFunctionExecutor TestCase 6"); SiddhiManager siddhiManager = new SiddhiManager(); String inStreamDefinition = "define stream inputStream (price1 double,price2 double, price3 double);"; String query = ("@info(name = 'query1') from inputStream " + "select maximum(*) as max " + "insert into outputStream;"); ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query); executionPlanRuntime.addCallback("query1", new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event event : inEvents) { count++; switch (count) { case 1: Assert.assertEquals(36.75, event.getData(0)); break; case 2: Assert.assertEquals(38.12, event.getData(0)); break; case 3: Assert.assertEquals(39.25, event.getData(0)); break; case 4: Assert.assertEquals(37.75, event.getData(0)); break; case 5: Assert.assertEquals(38.12, event.getData(0)); break; case 6: Assert.assertEquals(40.0, event.getData(0)); break; default: org.junit.Assert.fail(); } } } }); InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{36, 36.75, 35.75}); inputHandler.send(new Object[]{37.88, 38.12, 37.62}); inputHandler.send(new Object[]{39.00, 39.25, 38.62}); inputHandler.send(new Object[]{36.88, 37.75, 36.75}); inputHandler.send(new Object[]{38.12, 38.12, 37.75}); 
inputHandler.send(new Object[]{38.12, 40, 37.75}); Thread.sleep(300); Assert.assertEquals(6, count); Assert.assertTrue(eventArrived); executionPlanRuntime.shutdown(); } }
package io.dropwizard.logging;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.jmx.JMXConfigurator;
import ch.qos.logback.classic.jul.LevelChangePropagator;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.util.StatusPrinter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.logback.InstrumentedAppender;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.logging.async.AsyncAppenderFactory;
import io.dropwizard.logging.async.AsyncLoggingEventAppenderFactory;
import io.dropwizard.logging.filter.LevelFilterFactory;
import io.dropwizard.logging.filter.ThresholdLevelFilterFactory;
import io.dropwizard.logging.layout.DropwizardLayoutFactory;
import io.dropwizard.logging.layout.LayoutFactory;

import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.io.PrintStream;
import java.lang.management.ManagementFactory;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

import static java.util.Objects.requireNonNull;

/**
 * The default {@link LoggingFactory}: configures Logback from the deserialized
 * "logging" configuration section (a root level, per-logger overrides, and a
 * list of appender factories), registers a JMX configurator MBean, and wires a
 * metrics-instrumented appender onto the root logger.
 *
 * <p>Class-level locks serialize (a) JMX MBean registration and (b) mutation of
 * the shared {@link LoggerContext}, since multiple factories may target the
 * same static Logback context.
 */
@JsonTypeName("default")
public class DefaultLoggingFactory implements LoggingFactory {
    // Guards the isRegistered/registerMBean check-then-act below.
    private static final ReentrantLock MBEAN_REGISTRATION_LOCK = new ReentrantLock();
    // Guards loggerContext.reset()/addListener()/stop() — the context is JVM-global.
    private static final ReentrantLock CHANGE_LOGGER_CONTEXT_LOCK = new ReentrantLock();

    // Root logger level; defaults to INFO.
    @NotNull
    private Level level = Level.INFO;

    // Per-logger overrides keyed by logger name; the value is either a bare
    // level string or an object matching LoggerConfiguration.
    @NotNull
    private ImmutableMap<String, JsonNode> loggers = ImmutableMap.of();

    // Appenders attached to the root logger; console output by default.
    @Valid
    @NotNull
    private ImmutableList<AppenderFactory<ILoggingEvent>> appenders = ImmutableList.of(
            new ConsoleAppenderFactory<>()
    );

    @JsonIgnore
    private final LoggerContext loggerContext;

    @JsonIgnore
    private final PrintStream configurationErrorsStream;

    public DefaultLoggingFactory() {
        this(LoggingUtil.getLoggerContext(), System.err);
    }

    @VisibleForTesting
    DefaultLoggingFactory(LoggerContext loggerContext, PrintStream configurationErrorsStream) {
        this.loggerContext = requireNonNull(loggerContext);
        this.configurationErrorsStream = requireNonNull(configurationErrorsStream);
    }

    @VisibleForTesting
    LoggerContext getLoggerContext() {
        return loggerContext;
    }

    @VisibleForTesting
    PrintStream getConfigurationErrorsStream() {
        return configurationErrorsStream;
    }

    @JsonProperty
    public Level getLevel() {
        return level;
    }

    @JsonProperty
    public void setLevel(Level level) {
        this.level = level;
    }

    @JsonProperty
    public ImmutableMap<String, JsonNode> getLoggers() {
        return loggers;
    }

    @JsonProperty
    public void setLoggers(Map<String, JsonNode> loggers) {
        this.loggers = ImmutableMap.copyOf(loggers);
    }

    @JsonProperty
    public ImmutableList<AppenderFactory<ILoggingEvent>> getAppenders() {
        return appenders;
    }

    @JsonProperty
    public void setAppenders(List<AppenderFactory<ILoggingEvent>> appenders) {
        this.appenders = ImmutableList.copyOf(appenders);
    }

    /**
     * Applies this configuration: hijacks JUL, rebuilds the logger tree
     * (under the context lock), attaches the configured root appenders,
     * surfaces any Logback status errors on the configured error stream,
     * registers the JMX configurator once per JVM, and finally adds the
     * metrics-instrumented appender.
     */
    @Override
    public void configure(MetricRegistry metricRegistry, String name) {
        LoggingUtil.hijackJDKLogging();

        CHANGE_LOGGER_CONTEXT_LOCK.lock();
        final Logger root;
        try {
            root = configureLoggers(name);
        } finally {
            CHANGE_LOGGER_CONTEXT_LOCK.unlock();
        }

        // Root appenders are built AFTER configureLoggers(): that method calls
        // loggerContext.reset(), which would detach anything added earlier.
        final LevelFilterFactory<ILoggingEvent> levelFilterFactory = new ThresholdLevelFilterFactory();
        final AsyncAppenderFactory<ILoggingEvent> asyncAppenderFactory = new AsyncLoggingEventAppenderFactory();
        final LayoutFactory<ILoggingEvent> layoutFactory = new DropwizardLayoutFactory();
        for (AppenderFactory<ILoggingEvent> output : appenders) {
            root.addAppender(output.build(loggerContext, name, layoutFactory, levelFilterFactory, asyncAppenderFactory));
        }

        // Temporarily redirect Logback's status printer so configuration
        // errors land on our error stream, then restore the default.
        // NOTE: StatusPrinter state is static — presumably callers don't
        // configure concurrently; verify if that assumption changes.
        StatusPrinter.setPrintStream(configurationErrorsStream);
        try {
            StatusPrinter.printIfErrorsOccured(loggerContext);
        } finally {
            StatusPrinter.setPrintStream(System.out);
        }

        final MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        MBEAN_REGISTRATION_LOCK.lock();
        try {
            final ObjectName objectName = new ObjectName("io.dropwizard:type=Logging");
            // Register at most once per JVM; the lock makes the
            // check-then-register race-free across factory instances.
            if (!server.isRegistered(objectName)) {
                server.registerMBean(new JMXConfigurator(loggerContext, server, objectName),
                        objectName);
            }
        } catch (MalformedObjectNameException | InstanceAlreadyExistsException
                | NotCompliantMBeanException | MBeanRegistrationException e) {
            throw new RuntimeException(e);
        } finally {
            MBEAN_REGISTRATION_LOCK.unlock();
        }

        configureInstrumentation(root, metricRegistry);
    }

    /** Stops the logger context, flushing/closing its appenders. */
    @Override
    public void stop() {
        // Should acquire the lock to avoid concurrent listener changes
        CHANGE_LOGGER_CONTEXT_LOCK.lock();
        try {
            loggerContext.stop();
        } finally {
            CHANGE_LOGGER_CONTEXT_LOCK.unlock();
        }
    }

    /** Attaches a started metrics-recording appender to the root logger. */
    private void configureInstrumentation(Logger root, MetricRegistry metricRegistry) {
        final InstrumentedAppender appender = new InstrumentedAppender(metricRegistry);
        appender.setContext(loggerContext);
        appender.start();
        root.addAppender(appender);
    }

    /**
     * Resets the logger context and rebuilds it: installs a JUL level
     * propagator, sets the root level, and applies each per-logger override
     * (either a bare level string or a full {@code LoggerConfiguration}
     * object with level, additivity and appenders).
     *
     * @param name application name, passed through to appender factories
     * @return the (re)configured root logger
     * @throws IllegalArgumentException if a logger override node is neither
     *         a text level nor a well-formed configuration object
     */
    private Logger configureLoggers(String name) {
        final Logger root = loggerContext.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
        loggerContext.reset();

        // Keep java.util.logging levels in sync with Logback's.
        final LevelChangePropagator propagator = new LevelChangePropagator();
        propagator.setContext(loggerContext);
        propagator.setResetJUL(true);

        loggerContext.addListener(propagator);

        root.setLevel(level);

        final LevelFilterFactory<ILoggingEvent> levelFilterFactory = new ThresholdLevelFilterFactory();
        final AsyncAppenderFactory<ILoggingEvent> asyncAppenderFactory = new AsyncLoggingEventAppenderFactory();
        final LayoutFactory<ILoggingEvent> layoutFactory = new DropwizardLayoutFactory();
        for (Map.Entry<String, JsonNode> entry : loggers.entrySet()) {
            final Logger logger = loggerContext.getLogger(entry.getKey());
            final JsonNode jsonNode = entry.getValue();
            if (jsonNode.isTextual()) {
                // Just a level as a string
                logger.setLevel(Level.valueOf(jsonNode.asText()));
            } else if (jsonNode.isObject()) {
                // A level and an appender
                final LoggerConfiguration configuration;
                try {
                    configuration = Jackson.newObjectMapper().treeToValue(jsonNode,
                            LoggerConfiguration.class);
                } catch (JsonProcessingException e) {
                    throw new IllegalArgumentException("Wrong format of logger '"
                            + entry.getKey() + "'", e);
                }
                logger.setLevel(configuration.getLevel());
                logger.setAdditive(configuration.isAdditive());

                for (AppenderFactory<ILoggingEvent> appender : configuration.getAppenders()) {
                    logger.addAppender(appender.build(loggerContext, name, layoutFactory,
                            levelFilterFactory, asyncAppenderFactory));
                }
            } else {
                throw new IllegalArgumentException("Unsupported format of logger '"
                        + entry.getKey() + "'");
            }
        }

        return root;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("level", level)
                .add("loggers", loggers)
                .add("appenders", appenders)
                .toString();
    }
}
package org.odata4j.test.integration;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Hashtable;
import java.util.Map;

import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.odata4j.consumer.ODataConsumer;
import org.odata4j.consumer.behaviors.OClientBehavior;
import org.odata4j.core.ODataConstants.Headers;
import org.odata4j.core.ODataHttpMethod;
import org.odata4j.core.Throwables;
import org.odata4j.examples.cxf.consumer.ODataCxfConsumer;
import org.odata4j.examples.cxf.consumer.ODataCxfConsumer.Builder;
import org.odata4j.examples.cxf.producer.server.ODataCxfServer;
import org.odata4j.format.FormatType;
import org.odata4j.producer.resources.DefaultODataApplication;
import org.odata4j.producer.resources.RootApplication;
import org.odata4j.producer.server.ODataServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link RuntimeFacade} backed by the Apache CXF producer/consumer examples.
 * Hosts an OData server and issues raw HTTP requests (via Apache HttpClient)
 * for the integration tests.
 */
public class CxfRuntimeFacade implements RuntimeFacade {

  private static final Logger LOGGER = LoggerFactory.getLogger(CxfRuntimeFacade.class);

  /**
   * Starts an OData server on {@code baseUri} and blocks until a line is read
   * from stdin, then stops the server.
   */
  @Override
  public void hostODataServer(String baseUri) {
    try {
      ODataServer server = this.startODataServer(baseUri);
      System.out.println("Press any key to exit");
      new BufferedReader(new InputStreamReader(System.in)).readLine();
      server.stop();
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  @Override
  public ODataServer startODataServer(String baseUri) {
    return this.createODataServer(baseUri).start();
  }

  @Override
  public ODataServer createODataServer(String baseUri) {
    return new ODataCxfServer(baseUri, DefaultODataApplication.class, RootApplication.class);
  }

  /**
   * Builds an {@link ODataConsumer} for {@code endpointUri}; {@code format}
   * and {@code clientBehaviors} are applied only when non-null.
   */
  @Override
  public ODataConsumer createODataConsumer(String endpointUri, FormatType format, OClientBehavior... clientBehaviors) {
    Builder builder = ODataCxfConsumer.newBuilder(endpointUri);
    if (format != null) {
      builder = builder.setFormatType(format);
    }
    if (clientBehaviors != null) {
      builder = builder.setClientBehaviors(clientBehaviors);
    }
    return builder.build();
  }

  /** GETs {@code uri} with the given Accept media type; spaces are percent-encoded. */
  @Override
  public ResponseData acceptAndReturn(String uri, MediaType mediaType) {
    uri = uri.replace(" ", "%20");
    return this.getResource(ODataHttpMethod.GET, uri, null, mediaType, null);
  }

  /** GETs {@code uri} sending {@code accept} as a raw "accept" request header. */
  @Override
  public ResponseData getWebResource(String uri, String accept) {
    uri = uri.replace(" ", "%20");
    Hashtable<String, Object> header = new Hashtable<String, Object>();
    header.put("accept", accept);
    return this.getResource(ODataHttpMethod.GET, uri, null, null, header);
  }

  /** Intentionally a no-op in this facade; kept to satisfy the interface. */
  @Override
  public void accept(String uri, MediaType mediaType) {
    // no effect???
  }

  @Override
  public ResponseData getWebResource(String uri) {
    return this.getResource(ODataHttpMethod.GET, uri, null, null, null);
  }

  /**
   * Executes a single HTTP request and returns status code plus the response
   * body read as a string (empty string when there is no entity).
   *
   * @param method    OData HTTP verb; PATCH/MERGE are tunneled over POST via
   *                  the X-HTTP-Method header
   * @param uri       target URI (already encoded by the caller)
   * @param content   optional request body
   * @param mediaType optional Content-Type (and Accept, for GET)
   * @param headers   optional extra request headers; values are stringified
   * @throws RuntimeException wrapping any transport or protocol failure
   */
  private ResponseData getResource(ODataHttpMethod method, String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    String resource = "";
    try {
      HttpClient httpClient = new DefaultHttpClient();
      configureProxy(httpClient);

      HttpUriRequest httpRequest = createRequest(method, uri, content);

      if (headers != null) {
        // Iterate entries directly (avoids a second lookup per key); values
        // are stringified null-safely. The original redundant (String) cast
        // on toString() is gone.
        for (Map.Entry<String, Object> entry : headers.entrySet()) {
          Header header = new BasicHeader(entry.getKey(), String.valueOf(entry.getValue()));
          httpRequest.addHeader(header);
        }
      }
      if (mediaType != null) {
        if (content != null) {
          httpRequest.addHeader(HttpHeaders.CONTENT_TYPE, mediaType.toString());
        }
        if (method == ODataHttpMethod.GET) {
          httpRequest.addHeader(HttpHeaders.ACCEPT, mediaType.toString());
        }
      }

      // Execute the request
      HttpResponse response = httpClient.execute(httpRequest);

      // Examine the response status
      CxfRuntimeFacade.LOGGER.debug(response.getStatusLine().toString());

      // Get hold of the response entity
      HttpEntity entity = response.getEntity();
      // If the response does not enclose an entity, there is no need
      // to worry about connection release
      if (entity != null) {
        resource = EntityUtils.toString(entity);
      }
      return new ResponseData(response.getStatusLine().getStatusCode(), resource);
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /** Applies the JVM http.proxyHost/http.proxyPort settings to the client, if set. */
  private static void configureProxy(HttpClient httpClient) {
    if (System.getProperties().containsKey("http.proxyHost") && System.getProperties().containsKey("http.proxyPort")) {
      // support proxy settings
      String hostName = System.getProperties().getProperty("http.proxyHost");
      String hostPort = System.getProperties().getProperty("http.proxyPort");
      HttpHost proxy = new HttpHost(hostName, Integer.parseInt(hostPort));
      httpClient.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
    }
  }

  /** Maps an {@link ODataHttpMethod} to the matching HttpClient request object. */
  private static HttpUriRequest createRequest(ODataHttpMethod method, String uri, InputStream content) {
    switch (method) {
    case GET:
      return new HttpGet(uri);
    case DELETE:
      return new HttpDelete(uri);
    case PATCH: {
      // PATCH is tunneled over POST via the X-HTTP-Method header.
      HttpPost patch = new HttpPost(uri);
      if (content != null)
        patch.setEntity(new InputStreamEntity(content, -1));
      patch.setHeader(Headers.X_HTTP_METHOD, "PATCH");
      return patch;
    }
    case MERGE: {
      // MERGE is likewise tunneled over POST.
      HttpPost merge = new HttpPost(uri);
      if (content != null)
        merge.setEntity(new InputStreamEntity(content, -1));
      merge.setHeader(Headers.X_HTTP_METHOD, "MERGE");
      return merge;
    }
    case PUT: {
      HttpPut put = new HttpPut(uri);
      if (content != null)
        put.setEntity(new InputStreamEntity(content, -1));
      return put;
    }
    case POST: {
      HttpPost post = new HttpPost(uri);
      if (content != null)
        post.setEntity(new InputStreamEntity(content, -1));
      return post;
    }
    default:
      throw new RuntimeException("Unsupported method: " + method);
    }
  }

  @Override
  public ResponseData postWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.POST, uri, content, mediaType, headers);
  }

  @Override
  public ResponseData putWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.PUT, uri, content, mediaType, headers);
  }

  @Override
  public ResponseData mergeWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.MERGE, uri, content, mediaType, headers);
  }

  @Override
  public ResponseData patchWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.PATCH, uri, content, mediaType, headers);
  }

  @Override
  public ResponseData getWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.GET, uri, content, mediaType, headers);
  }

  @Override
  public ResponseData deleteWebResource(String uri, InputStream content, MediaType mediaType, Map<String, Object> headers) {
    return this.getResource(ODataHttpMethod.DELETE, uri, content, mediaType, headers);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.linear; import java.util.Arrays; import java.util.Random; import org.junit.Test; import org.junit.Assert; import org.apache.commons.math3.TestUtils; import org.apache.commons.math3.util.FastMath; import org.apache.commons.math3.exception.NullArgumentException; import org.apache.commons.math3.exception.OutOfRangeException; import org.apache.commons.math3.exception.NoDataException; import org.apache.commons.math3.exception.NumberIsTooSmallException; import org.apache.commons.math3.exception.MathIllegalArgumentException; /** * Test cases for the {@link BlockRealMatrix} class. 
* * @version $Id: BlockRealMatrixTest.java 1364030 2012-07-21 01:10:04Z erans $ */ public final class BlockRealMatrixTest { // 3 x 3 identity matrix protected double[][] id = { {1d,0d,0d}, {0d,1d,0d}, {0d,0d,1d} }; // Test data for group operations protected double[][] testData = { {1d,2d,3d}, {2d,5d,3d}, {1d,0d,8d} }; protected double[][] testDataLU = {{2d, 5d, 3d}, {.5d, -2.5d, 6.5d}, {0.5d, 0.2d, .2d}}; protected double[][] testDataPlus2 = { {3d,4d,5d}, {4d,7d,5d}, {3d,2d,10d} }; protected double[][] testDataMinus = { {-1d,-2d,-3d}, {-2d,-5d,-3d}, {-1d,0d,-8d} }; protected double[] testDataRow1 = {1d,2d,3d}; protected double[] testDataCol3 = {3d,3d,8d}; protected double[][] testDataInv = { {-40d,16d,9d}, {13d,-5d,-3d}, {5d,-2d,-1d} }; protected double[] preMultTest = {8,12,33}; protected double[][] testData2 ={ {1d,2d,3d}, {2d,5d,3d}}; protected double[][] testData2T = { {1d,2d}, {2d,5d}, {3d,3d}}; protected double[][] testDataPlusInv = { {-39d,18d,12d}, {15d,0d,0d}, {6d,-2d,7d} }; // lu decomposition tests protected double[][] luData = { {2d,3d,3d}, {0d,5d,7d}, {6d,9d,8d} }; protected double[][] luDataLUDecomposition = { {6d,9d,8d}, {0d,5d,7d}, {0.33333333333333,0d,0.33333333333333} }; // singular matrices protected double[][] singular = { {2d,3d}, {2d,3d} }; protected double[][] bigSingular = {{1d,2d,3d,4d}, {2d,5d,3d,4d}, {7d,3d,256d,1930d}, {3d,7d,6d,8d}}; // 4th row = 1st + 2nd protected double[][] detData = { {1d,2d,3d}, {4d,5d,6d}, {7d,8d,10d} }; protected double[][] detData2 = { {1d, 3d}, {2d, 4d}}; // vectors protected double[] testVector = {1,2,3}; protected double[] testVector2 = {1,2,3,4}; // submatrix accessor tests protected double[][] subTestData = {{1, 2, 3, 4}, {1.5, 2.5, 3.5, 4.5}, {2, 4, 6, 8}, {4, 5, 6, 7}}; // array selections protected double[][] subRows02Cols13 = { {2, 4}, {4, 8}}; protected double[][] subRows03Cols12 = { {2, 3}, {5, 6}}; protected double[][] subRows03Cols123 = { {2, 3, 4} , {5, 6, 7}}; // effective permutations protected 
double[][] subRows20Cols123 = { {4, 6, 8} , {2, 3, 4}}; protected double[][] subRows31Cols31 = {{7, 5}, {4.5, 2.5}}; // contiguous ranges protected double[][] subRows01Cols23 = {{3,4} , {3.5, 4.5}}; protected double[][] subRows23Cols00 = {{2} , {4}}; protected double[][] subRows00Cols33 = {{4}}; // row matrices protected double[][] subRow0 = {{1,2,3,4}}; protected double[][] subRow3 = {{4,5,6,7}}; // column matrices protected double[][] subColumn1 = {{2}, {2.5}, {4}, {5}}; protected double[][] subColumn3 = {{4}, {4.5}, {8}, {7}}; // tolerances protected double entryTolerance = 10E-16; protected double normTolerance = 10E-14; /** test dimensions */ @Test public void testDimensions() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m2 = new BlockRealMatrix(testData2); Assert.assertEquals("testData row dimension",3,m.getRowDimension()); Assert.assertEquals("testData column dimension",3,m.getColumnDimension()); Assert.assertTrue("testData is square",m.isSquare()); Assert.assertEquals("testData2 row dimension",m2.getRowDimension(),2); Assert.assertEquals("testData2 column dimension",m2.getColumnDimension(),3); Assert.assertTrue("testData2 is not square",!m2.isSquare()); } /** test copy functions */ @Test public void testCopyFunctions() { Random r = new Random(66636328996002l); BlockRealMatrix m1 = createRandomMatrix(r, 47, 83); BlockRealMatrix m2 = new BlockRealMatrix(m1.getData()); Assert.assertEquals(m1, m2); BlockRealMatrix m3 = new BlockRealMatrix(testData); BlockRealMatrix m4 = new BlockRealMatrix(m3.getData()); Assert.assertEquals(m3, m4); } /** test add */ @Test public void testAdd() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix mInv = new BlockRealMatrix(testDataInv); RealMatrix mPlusMInv = m.add(mInv); double[][] sumEntries = mPlusMInv.getData(); for (int row = 0; row < m.getRowDimension(); row++) { for (int col = 0; col < m.getColumnDimension(); col++) { Assert.assertEquals("sum entry entry", 
testDataPlusInv[row][col],sumEntries[row][col], entryTolerance); } } } /** test add failure */ @Test public void testAddFail() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m2 = new BlockRealMatrix(testData2); try { m.add(m2); Assert.fail("MathIllegalArgumentException expected"); } catch (MathIllegalArgumentException ex) { // ignored } } /** test norm */ @Test public void testNorm() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m2 = new BlockRealMatrix(testData2); Assert.assertEquals("testData norm",14d,m.getNorm(),entryTolerance); Assert.assertEquals("testData2 norm",7d,m2.getNorm(),entryTolerance); } /** test Frobenius norm */ @Test public void testFrobeniusNorm() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m2 = new BlockRealMatrix(testData2); Assert.assertEquals("testData Frobenius norm", FastMath.sqrt(117.0), m.getFrobeniusNorm(), entryTolerance); Assert.assertEquals("testData2 Frobenius norm", FastMath.sqrt(52.0), m2.getFrobeniusNorm(), entryTolerance); } /** test m-n = m + -n */ @Test public void testPlusMinus() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m2 = new BlockRealMatrix(testDataInv); assertClose(m.subtract(m2), m2.scalarMultiply(-1d).add(m), entryTolerance); try { m.subtract(new BlockRealMatrix(testData2)); Assert.fail("Expecting illegalArgumentException"); } catch (MathIllegalArgumentException ex) { // ignored } } /** test multiply */ @Test public void testMultiply() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix mInv = new BlockRealMatrix(testDataInv); BlockRealMatrix identity = new BlockRealMatrix(id); BlockRealMatrix m2 = new BlockRealMatrix(testData2); assertClose(m.multiply(mInv), identity, entryTolerance); assertClose(mInv.multiply(m), identity, entryTolerance); assertClose(m.multiply(identity), m, entryTolerance); assertClose(identity.multiply(mInv), mInv, entryTolerance); assertClose(m2.multiply(identity), m2, 
entryTolerance); try { m.multiply(new BlockRealMatrix(bigSingular)); Assert.fail("Expecting illegalArgumentException"); } catch (MathIllegalArgumentException ex) { // expected } } @Test public void testSeveralBlocks() { RealMatrix m = new BlockRealMatrix(35, 71); for (int i = 0; i < m.getRowDimension(); ++i) { for (int j = 0; j < m.getColumnDimension(); ++j) { m.setEntry(i, j, i + j / 1024.0); } } RealMatrix mT = m.transpose(); Assert.assertEquals(m.getRowDimension(), mT.getColumnDimension()); Assert.assertEquals(m.getColumnDimension(), mT.getRowDimension()); for (int i = 0; i < mT.getRowDimension(); ++i) { for (int j = 0; j < mT.getColumnDimension(); ++j) { Assert.assertEquals(m.getEntry(j, i), mT.getEntry(i, j), 0); } } RealMatrix mPm = m.add(m); for (int i = 0; i < mPm.getRowDimension(); ++i) { for (int j = 0; j < mPm.getColumnDimension(); ++j) { Assert.assertEquals(2 * m.getEntry(i, j), mPm.getEntry(i, j), 0); } } RealMatrix mPmMm = mPm.subtract(m); for (int i = 0; i < mPmMm.getRowDimension(); ++i) { for (int j = 0; j < mPmMm.getColumnDimension(); ++j) { Assert.assertEquals(m.getEntry(i, j), mPmMm.getEntry(i, j), 0); } } RealMatrix mTm = mT.multiply(m); for (int i = 0; i < mTm.getRowDimension(); ++i) { for (int j = 0; j < mTm.getColumnDimension(); ++j) { double sum = 0; for (int k = 0; k < mT.getColumnDimension(); ++k) { sum += (k + i / 1024.0) * (k + j / 1024.0); } Assert.assertEquals(sum, mTm.getEntry(i, j), 0); } } RealMatrix mmT = m.multiply(mT); for (int i = 0; i < mmT.getRowDimension(); ++i) { for (int j = 0; j < mmT.getColumnDimension(); ++j) { double sum = 0; for (int k = 0; k < m.getColumnDimension(); ++k) { sum += (i + k / 1024.0) * (j + k / 1024.0); } Assert.assertEquals(sum, mmT.getEntry(i, j), 0); } } RealMatrix sub1 = m.getSubMatrix(2, 9, 5, 20); for (int i = 0; i < sub1.getRowDimension(); ++i) { for (int j = 0; j < sub1.getColumnDimension(); ++j) { Assert.assertEquals((i + 2) + (j + 5) / 1024.0, sub1.getEntry(i, j), 0); } } RealMatrix sub2 = 
m.getSubMatrix(10, 12, 3, 70); for (int i = 0; i < sub2.getRowDimension(); ++i) { for (int j = 0; j < sub2.getColumnDimension(); ++j) { Assert.assertEquals((i + 10) + (j + 3) / 1024.0, sub2.getEntry(i, j), 0); } } RealMatrix sub3 = m.getSubMatrix(30, 34, 0, 5); for (int i = 0; i < sub3.getRowDimension(); ++i) { for (int j = 0; j < sub3.getColumnDimension(); ++j) { Assert.assertEquals((i + 30) + (j + 0) / 1024.0, sub3.getEntry(i, j), 0); } } RealMatrix sub4 = m.getSubMatrix(30, 32, 62, 65); for (int i = 0; i < sub4.getRowDimension(); ++i) { for (int j = 0; j < sub4.getColumnDimension(); ++j) { Assert.assertEquals((i + 30) + (j + 62) / 1024.0, sub4.getEntry(i, j), 0); } } } //Additional Test for BlockRealMatrixTest.testMultiply private double[][] d3 = new double[][] {{1,2,3,4},{5,6,7,8}}; private double[][] d4 = new double[][] {{1},{2},{3},{4}}; private double[][] d5 = new double[][] {{30},{70}}; @Test public void testMultiply2() { RealMatrix m3 = new BlockRealMatrix(d3); RealMatrix m4 = new BlockRealMatrix(d4); RealMatrix m5 = new BlockRealMatrix(d5); assertClose(m3.multiply(m4), m5, entryTolerance); } /** test trace */ @Test public void testTrace() { RealMatrix m = new BlockRealMatrix(id); Assert.assertEquals("identity trace",3d,m.getTrace(),entryTolerance); m = new BlockRealMatrix(testData2); try { m.getTrace(); Assert.fail("Expecting NonSquareMatrixException"); } catch (NonSquareMatrixException ex) { // ignored } } /** test scalarAdd */ @Test public void testScalarAdd() { RealMatrix m = new BlockRealMatrix(testData); assertClose(new BlockRealMatrix(testDataPlus2), m.scalarAdd(2d), entryTolerance); } /** test operate */ @Test public void testOperate() { RealMatrix m = new BlockRealMatrix(id); assertClose(testVector, m.operate(testVector), entryTolerance); assertClose(testVector, m.operate(new ArrayRealVector(testVector)).toArray(), entryTolerance); m = new BlockRealMatrix(bigSingular); try { m.operate(testVector); Assert.fail("Expecting illegalArgumentException"); 
} catch (MathIllegalArgumentException ex) { // ignored } } @Test public void testOperateLarge() { int p = (7 * BlockRealMatrix.BLOCK_SIZE) / 2; int q = (5 * BlockRealMatrix.BLOCK_SIZE) / 2; int r = 3 * BlockRealMatrix.BLOCK_SIZE; Random random = new Random(111007463902334l); RealMatrix m1 = createRandomMatrix(random, p, q); RealMatrix m2 = createRandomMatrix(random, q, r); RealMatrix m1m2 = m1.multiply(m2); for (int i = 0; i < r; ++i) { checkArrays(m1m2.getColumn(i), m1.operate(m2.getColumn(i))); } } @Test public void testOperatePremultiplyLarge() { int p = (7 * BlockRealMatrix.BLOCK_SIZE) / 2; int q = (5 * BlockRealMatrix.BLOCK_SIZE) / 2; int r = 3 * BlockRealMatrix.BLOCK_SIZE; Random random = new Random(111007463902334l); RealMatrix m1 = createRandomMatrix(random, p, q); RealMatrix m2 = createRandomMatrix(random, q, r); RealMatrix m1m2 = m1.multiply(m2); for (int i = 0; i < p; ++i) { checkArrays(m1m2.getRow(i), m2.preMultiply(m1.getRow(i))); } } /** test issue MATH-209 */ @Test public void testMath209() { RealMatrix a = new BlockRealMatrix(new double[][] { { 1, 2 }, { 3, 4 }, { 5, 6 } }); double[] b = a.operate(new double[] { 1, 1 }); Assert.assertEquals(a.getRowDimension(), b.length); Assert.assertEquals( 3.0, b[0], 1.0e-12); Assert.assertEquals( 7.0, b[1], 1.0e-12); Assert.assertEquals(11.0, b[2], 1.0e-12); } /** test transpose */ @Test public void testTranspose() { RealMatrix m = new BlockRealMatrix(testData); RealMatrix mIT = new LUDecomposition(m).getSolver().getInverse().transpose(); RealMatrix mTI = new LUDecomposition(m.transpose()).getSolver().getInverse(); assertClose(mIT, mTI, normTolerance); m = new BlockRealMatrix(testData2); RealMatrix mt = new BlockRealMatrix(testData2T); assertClose(mt, m.transpose(), normTolerance); } /** test preMultiply by vector */ @Test public void testPremultiplyVector() { RealMatrix m = new BlockRealMatrix(testData); assertClose(m.preMultiply(testVector), preMultTest, normTolerance); assertClose(m.preMultiply(new 
ArrayRealVector(testVector).toArray()), preMultTest, normTolerance); m = new BlockRealMatrix(bigSingular); try { m.preMultiply(testVector); Assert.fail("expecting MathIllegalArgumentException"); } catch (MathIllegalArgumentException ex) { // ignored } } @Test public void testPremultiply() { RealMatrix m3 = new BlockRealMatrix(d3); RealMatrix m4 = new BlockRealMatrix(d4); RealMatrix m5 = new BlockRealMatrix(d5); assertClose(m4.preMultiply(m3), m5, entryTolerance); BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix mInv = new BlockRealMatrix(testDataInv); BlockRealMatrix identity = new BlockRealMatrix(id); assertClose(m.preMultiply(mInv), identity, entryTolerance); assertClose(mInv.preMultiply(m), identity, entryTolerance); assertClose(m.preMultiply(identity), m, entryTolerance); assertClose(identity.preMultiply(mInv), mInv, entryTolerance); try { m.preMultiply(new BlockRealMatrix(bigSingular)); Assert.fail("Expecting illegalArgumentException"); } catch (MathIllegalArgumentException ex) { // ignored } } @Test public void testGetVectors() { RealMatrix m = new BlockRealMatrix(testData); assertClose(m.getRow(0), testDataRow1, entryTolerance); assertClose(m.getColumn(2), testDataCol3, entryTolerance); try { m.getRow(10); Assert.fail("expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // ignored } try { m.getColumn(-1); Assert.fail("expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // ignored } } @Test public void testGetEntry() { RealMatrix m = new BlockRealMatrix(testData); Assert.assertEquals("get entry",m.getEntry(0,1),2d,entryTolerance); try { m.getEntry(10, 4); Assert.fail ("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } /** test examples in user guide */ @Test public void testExamples() { // Create a real matrix with two rows and three columns double[][] matrixData = { {1d,2d,3d}, {2d,5d,3d}}; RealMatrix m = new BlockRealMatrix(matrixData); // One more with three rows, two 
columns double[][] matrixData2 = { {1d,2d}, {2d,5d}, {1d, 7d}}; RealMatrix n = new BlockRealMatrix(matrixData2); // Now multiply m by n RealMatrix p = m.multiply(n); Assert.assertEquals(2, p.getRowDimension()); Assert.assertEquals(2, p.getColumnDimension()); // Invert p RealMatrix pInverse = new LUDecomposition(p).getSolver().getInverse(); Assert.assertEquals(2, pInverse.getRowDimension()); Assert.assertEquals(2, pInverse.getColumnDimension()); // Solve example double[][] coefficientsData = {{2, 3, -2}, {-1, 7, 6}, {4, -3, -5}}; RealMatrix coefficients = new BlockRealMatrix(coefficientsData); RealVector constants = new ArrayRealVector(new double[]{1, -2, 1}, false); RealVector solution = new LUDecomposition(coefficients).getSolver().solve(constants); final double cst0 = constants.getEntry(0); final double cst1 = constants.getEntry(1); final double cst2 = constants.getEntry(2); final double sol0 = solution.getEntry(0); final double sol1 = solution.getEntry(1); final double sol2 = solution.getEntry(2); Assert.assertEquals(2 * sol0 + 3 * sol1 -2 * sol2, cst0, 1E-12); Assert.assertEquals(-1 * sol0 + 7 * sol1 + 6 * sol2, cst1, 1E-12); Assert.assertEquals(4 * sol0 - 3 * sol1 -5 * sol2, cst2, 1E-12); } // test submatrix accessors @Test public void testGetSubMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); checkGetSubMatrix(m, subRows23Cols00, 2 , 3 , 0, 0); checkGetSubMatrix(m, subRows00Cols33, 0 , 0 , 3, 3); checkGetSubMatrix(m, subRows01Cols23, 0 , 1 , 2, 3); checkGetSubMatrix(m, subRows02Cols13, new int[] { 0, 2 }, new int[] { 1, 3 }); checkGetSubMatrix(m, subRows03Cols12, new int[] { 0, 3 }, new int[] { 1, 2 }); checkGetSubMatrix(m, subRows03Cols123, new int[] { 0, 3 }, new int[] { 1, 2, 3 }); checkGetSubMatrix(m, subRows20Cols123, new int[] { 2, 0 }, new int[] { 1, 2, 3 }); checkGetSubMatrix(m, subRows31Cols31, new int[] { 3, 1 }, new int[] { 3, 1 }); checkGetSubMatrix(m, subRows31Cols31, new int[] { 3, 1 }, new int[] { 3, 1 }); checkGetSubMatrix(m, null, 
1, 0, 2, 4); checkGetSubMatrix(m, null, -1, 1, 2, 2); checkGetSubMatrix(m, null, 1, 0, 2, 2); checkGetSubMatrix(m, null, 1, 0, 2, 4); checkGetSubMatrix(m, null, new int[] {}, new int[] { 0 }); checkGetSubMatrix(m, null, new int[] { 0 }, new int[] { 4 }); } private void checkGetSubMatrix(RealMatrix m, double[][] reference, int startRow, int endRow, int startColumn, int endColumn) { try { RealMatrix sub = m.getSubMatrix(startRow, endRow, startColumn, endColumn); if (reference != null) { Assert.assertEquals(new BlockRealMatrix(reference), sub); } else { Assert.fail("Expecting OutOfRangeException or NumberIsTooSmallException or NoDataException"); } } catch (OutOfRangeException e) { if (reference != null) { throw e; } } catch (NumberIsTooSmallException e) { if (reference != null) { throw e; } } catch (NoDataException e) { if (reference != null) { throw e; } } } private void checkGetSubMatrix(RealMatrix m, double[][] reference, int[] selectedRows, int[] selectedColumns) { try { RealMatrix sub = m.getSubMatrix(selectedRows, selectedColumns); if (reference != null) { Assert.assertEquals(new BlockRealMatrix(reference), sub); } else { Assert.fail("Expecting OutOfRangeException or NumberIsTooSmallExceptiono r NoDataException"); } } catch (OutOfRangeException e) { if (reference != null) { throw e; } } catch (NumberIsTooSmallException e) { if (reference != null) { throw e; } } catch (NoDataException e) { if (reference != null) { throw e; } } } @Test public void testGetSetMatrixLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); RealMatrix sub = new BlockRealMatrix(n - 4, n - 4).scalarAdd(1); m.setSubMatrix(sub.getData(), 2, 2); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if ((i < 2) || (i > n - 3) || (j < 2) || (j > n - 3)) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } Assert.assertEquals(sub, m.getSubMatrix(2, n - 3, 2, n - 3)); } @Test public void 
testCopySubMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); checkCopy(m, subRows23Cols00, 2 , 3 , 0, 0); checkCopy(m, subRows00Cols33, 0 , 0 , 3, 3); checkCopy(m, subRows01Cols23, 0 , 1 , 2, 3); checkCopy(m, subRows02Cols13, new int[] { 0, 2 }, new int[] { 1, 3 }); checkCopy(m, subRows03Cols12, new int[] { 0, 3 }, new int[] { 1, 2 }); checkCopy(m, subRows03Cols123, new int[] { 0, 3 }, new int[] { 1, 2, 3 }); checkCopy(m, subRows20Cols123, new int[] { 2, 0 }, new int[] { 1, 2, 3 }); checkCopy(m, subRows31Cols31, new int[] { 3, 1 }, new int[] { 3, 1 }); checkCopy(m, subRows31Cols31, new int[] { 3, 1 }, new int[] { 3, 1 }); checkCopy(m, null, 1, 0, 2, 4); checkCopy(m, null, -1, 1, 2, 2); checkCopy(m, null, 1, 0, 2, 2); checkCopy(m, null, 1, 0, 2, 4); checkCopy(m, null, new int[] {}, new int[] { 0 }); checkCopy(m, null, new int[] { 0 }, new int[] { 4 }); } private void checkCopy(RealMatrix m, double[][] reference, int startRow, int endRow, int startColumn, int endColumn) { try { double[][] sub = (reference == null) ? new double[1][1] : new double[reference.length][reference[0].length]; m.copySubMatrix(startRow, endRow, startColumn, endColumn, sub); if (reference != null) { Assert.assertEquals(new BlockRealMatrix(reference), new BlockRealMatrix(sub)); } else { Assert.fail("Expecting OutOfRangeException or NumberIsTooSmallException or NoDataException"); } } catch (OutOfRangeException e) { if (reference != null) { throw e; } } catch (NumberIsTooSmallException e) { if (reference != null) { throw e; } } catch (NoDataException e) { if (reference != null) { throw e; } } } private void checkCopy(RealMatrix m, double[][] reference, int[] selectedRows, int[] selectedColumns) { try { double[][] sub = (reference == null) ? 
new double[1][1] : new double[reference.length][reference[0].length]; m.copySubMatrix(selectedRows, selectedColumns, sub); if (reference != null) { Assert.assertEquals(new BlockRealMatrix(reference), new BlockRealMatrix(sub)); } else { Assert.fail("Expecting OutOfRangeException or NumberIsTooSmallException or NoDataException"); } } catch (OutOfRangeException e) { if (reference != null) { throw e; } } catch (NumberIsTooSmallException e) { if (reference != null) { throw e; } } catch (NoDataException e) { if (reference != null) { throw e; } } } @Test public void testGetRowMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); RealMatrix mRow0 = new BlockRealMatrix(subRow0); RealMatrix mRow3 = new BlockRealMatrix(subRow3); Assert.assertEquals("Row0", mRow0, m.getRowMatrix(0)); Assert.assertEquals("Row3", mRow3, m.getRowMatrix(3)); try { m.getRowMatrix(-1); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getRowMatrix(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetRowMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); RealMatrix mRow3 = new BlockRealMatrix(subRow3); Assert.assertNotSame(mRow3, m.getRowMatrix(0)); m.setRowMatrix(0, mRow3); Assert.assertEquals(mRow3, m.getRowMatrix(0)); try { m.setRowMatrix(-1, mRow3); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setRowMatrix(0, m); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetRowMatrixLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); RealMatrix sub = new BlockRealMatrix(1, n).scalarAdd(1); m.setRowMatrix(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (i != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, 
m.getEntry(i, j), 0.0); } } } Assert.assertEquals(sub, m.getRowMatrix(2)); } @Test public void testGetColumnMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); RealMatrix mColumn1 = new BlockRealMatrix(subColumn1); RealMatrix mColumn3 = new BlockRealMatrix(subColumn3); Assert.assertEquals(mColumn1, m.getColumnMatrix(1)); Assert.assertEquals(mColumn3, m.getColumnMatrix(3)); try { m.getColumnMatrix(-1); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getColumnMatrix(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetColumnMatrix() { RealMatrix m = new BlockRealMatrix(subTestData); RealMatrix mColumn3 = new BlockRealMatrix(subColumn3); Assert.assertNotSame(mColumn3, m.getColumnMatrix(1)); m.setColumnMatrix(1, mColumn3); Assert.assertEquals(mColumn3, m.getColumnMatrix(1)); try { m.setColumnMatrix(-1, mColumn3); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setColumnMatrix(0, m); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetColumnMatrixLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); RealMatrix sub = new BlockRealMatrix(n, 1).scalarAdd(1); m.setColumnMatrix(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (j != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } Assert.assertEquals(sub, m.getColumnMatrix(2)); } @Test public void testGetRowVector() { RealMatrix m = new BlockRealMatrix(subTestData); RealVector mRow0 = new ArrayRealVector(subRow0[0]); RealVector mRow3 = new ArrayRealVector(subRow3[0]); Assert.assertEquals(mRow0, m.getRowVector(0)); Assert.assertEquals(mRow3, m.getRowVector(3)); try { m.getRowVector(-1); Assert.fail("Expecting 
OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getRowVector(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetRowVector() { RealMatrix m = new BlockRealMatrix(subTestData); RealVector mRow3 = new ArrayRealVector(subRow3[0]); Assert.assertNotSame(mRow3, m.getRowMatrix(0)); m.setRowVector(0, mRow3); Assert.assertEquals(mRow3, m.getRowVector(0)); try { m.setRowVector(-1, mRow3); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setRowVector(0, new ArrayRealVector(5)); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetRowVectorLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); RealVector sub = new ArrayRealVector(n, 1.0); m.setRowVector(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (i != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } Assert.assertEquals(sub, m.getRowVector(2)); } @Test public void testGetColumnVector() { RealMatrix m = new BlockRealMatrix(subTestData); RealVector mColumn1 = columnToVector(subColumn1); RealVector mColumn3 = columnToVector(subColumn3); Assert.assertEquals(mColumn1, m.getColumnVector(1)); Assert.assertEquals(mColumn3, m.getColumnVector(3)); try { m.getColumnVector(-1); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getColumnVector(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetColumnVector() { RealMatrix m = new BlockRealMatrix(subTestData); RealVector mColumn3 = columnToVector(subColumn3); Assert.assertNotSame(mColumn3, m.getColumnVector(1)); m.setColumnVector(1, mColumn3); Assert.assertEquals(mColumn3, 
m.getColumnVector(1)); try { m.setColumnVector(-1, mColumn3); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setColumnVector(0, new ArrayRealVector(5)); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetColumnVectorLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); RealVector sub = new ArrayRealVector(n, 1.0); m.setColumnVector(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (j != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } Assert.assertEquals(sub, m.getColumnVector(2)); } private RealVector columnToVector(double[][] column) { double[] data = new double[column.length]; for (int i = 0; i < data.length; ++i) { data[i] = column[i][0]; } return new ArrayRealVector(data, false); } @Test public void testGetRow() { RealMatrix m = new BlockRealMatrix(subTestData); checkArrays(subRow0[0], m.getRow(0)); checkArrays(subRow3[0], m.getRow(3)); try { m.getRow(-1); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getRow(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetRow() { RealMatrix m = new BlockRealMatrix(subTestData); Assert.assertTrue(subRow3[0][0] != m.getRow(0)[0]); m.setRow(0, subRow3[0]); checkArrays(subRow3[0], m.getRow(0)); try { m.setRow(-1, subRow3[0]); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setRow(0, new double[5]); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetRowLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); double[] sub = new 
double[n]; Arrays.fill(sub, 1.0); m.setRow(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (i != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } checkArrays(sub, m.getRow(2)); } @Test public void testGetColumn() { RealMatrix m = new BlockRealMatrix(subTestData); double[] mColumn1 = columnToArray(subColumn1); double[] mColumn3 = columnToArray(subColumn3); checkArrays(mColumn1, m.getColumn(1)); checkArrays(mColumn3, m.getColumn(3)); try { m.getColumn(-1); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.getColumn(4); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } } @Test public void testSetColumn() { RealMatrix m = new BlockRealMatrix(subTestData); double[] mColumn3 = columnToArray(subColumn3); Assert.assertTrue(mColumn3[0] != m.getColumn(1)[0]); m.setColumn(1, mColumn3); checkArrays(mColumn3, m.getColumn(1)); try { m.setColumn(-1, mColumn3); Assert.fail("Expecting OutOfRangeException"); } catch (OutOfRangeException ex) { // expected } try { m.setColumn(0, new double[5]); Assert.fail("Expecting MatrixDimensionMismatchException"); } catch (MatrixDimensionMismatchException ex) { // expected } } @Test public void testGetSetColumnLarge() { int n = 3 * BlockRealMatrix.BLOCK_SIZE; RealMatrix m = new BlockRealMatrix(n, n); double[] sub = new double[n]; Arrays.fill(sub, 1.0); m.setColumn(2, sub); for (int i = 0; i < n; ++i) { for (int j = 0; j < n; ++j) { if (j != 2) { Assert.assertEquals(0.0, m.getEntry(i, j), 0.0); } else { Assert.assertEquals(1.0, m.getEntry(i, j), 0.0); } } } checkArrays(sub, m.getColumn(2)); } private double[] columnToArray(double[][] column) { double[] data = new double[column.length]; for (int i = 0; i < data.length; ++i) { data[i] = column[i][0]; } return data; } private void checkArrays(double[] expected, double[] actual) { 
Assert.assertEquals(expected.length, actual.length); for (int i = 0; i < expected.length; ++i) { Assert.assertEquals(expected[i], actual[i], 0); } } @Test public void testEqualsAndHashCode() { BlockRealMatrix m = new BlockRealMatrix(testData); BlockRealMatrix m1 = m.copy(); BlockRealMatrix mt = m.transpose(); Assert.assertTrue(m.hashCode() != mt.hashCode()); Assert.assertEquals(m.hashCode(), m1.hashCode()); Assert.assertEquals(m, m); Assert.assertEquals(m, m1); Assert.assertFalse(m.equals(null)); Assert.assertFalse(m.equals(mt)); Assert.assertFalse(m.equals(new BlockRealMatrix(bigSingular))); } @Test public void testToString() { BlockRealMatrix m = new BlockRealMatrix(testData); Assert.assertEquals("BlockRealMatrix{{1.0,2.0,3.0},{2.0,5.0,3.0},{1.0,0.0,8.0}}", m.toString()); } @Test public void testSetSubMatrix() { BlockRealMatrix m = new BlockRealMatrix(testData); m.setSubMatrix(detData2,1,1); RealMatrix expected = new BlockRealMatrix (new double[][] {{1.0,2.0,3.0},{2.0,1.0,3.0},{1.0,2.0,4.0}}); Assert.assertEquals(expected, m); m.setSubMatrix(detData2,0,0); expected = new BlockRealMatrix (new double[][] {{1.0,3.0,3.0},{2.0,4.0,3.0},{1.0,2.0,4.0}}); Assert.assertEquals(expected, m); m.setSubMatrix(testDataPlus2,0,0); expected = new BlockRealMatrix (new double[][] {{3.0,4.0,5.0},{4.0,7.0,5.0},{3.0,2.0,10.0}}); Assert.assertEquals(expected, m); // javadoc example BlockRealMatrix matrix = new BlockRealMatrix (new double[][] {{1, 2, 3, 4}, {5, 6, 7, 8}, {9, 0, 1 , 2}}); matrix.setSubMatrix(new double[][] {{3, 4}, {5, 6}}, 1, 1); expected = new BlockRealMatrix (new double[][] {{1, 2, 3, 4}, {5, 3, 4, 8}, {9, 5 ,6, 2}}); Assert.assertEquals(expected, matrix); // dimension overflow try { m.setSubMatrix(testData,1,1); Assert.fail("expecting OutOfRangeException"); } catch (OutOfRangeException e) { // expected } // dimension underflow try { m.setSubMatrix(testData,-1,1); Assert.fail("expecting OutOfRangeException"); } catch (OutOfRangeException e) { // expected } try { 
// NOTE(review): this is the tail of a test method whose opening (and the `try` for
// this first case) is in the previous chunk — presumably a setSubMatrix error-case
// test; each case below expects a specific exception from an invalid sub-matrix.
      m.setSubMatrix(testData,1,-1);
      Assert.fail("expecting OutOfRangeException");
    } catch (OutOfRangeException e) {
      // expected
    }

    // null
    try {
      m.setSubMatrix(null,1,1);
      Assert.fail("expecting NullArgumentException");
    } catch (NullArgumentException e) {
      // expected
    }

    // ragged
    try {
      m.setSubMatrix(new double[][] {{1}, {2, 3}}, 0, 0);
      Assert.fail("expecting MathIllegalArgumentException");
    } catch (MathIllegalArgumentException e) {
      // expected
    }

    // empty
    try {
      m.setSubMatrix(new double[][] {{}}, 0, 0);
      Assert.fail("expecting MathIllegalArgumentException");
    } catch (MathIllegalArgumentException e) {
      // expected
    }
  }

  /**
   * Cross-checks the four matrix walk orders (row, column, optimized) against
   * each other: entries written by one order with {@link SetVisitor} must be
   * read back intact (and fully counted) by another order with
   * {@link GetVisitor}.  The sub-range variants additionally verify that
   * entries outside the visited [1, rows-2] x [1, columns-2] window were never
   * touched (they must still be 0).
   */
  @Test
  public void testWalk() {
    int rows = 150;
    int columns = 75;

    // write in row order, read back in optimized order (full matrix)
    RealMatrix m = new BlockRealMatrix(rows, columns);
    m.walkInRowOrder(new SetVisitor());
    GetVisitor getVisitor = new GetVisitor();
    m.walkInOptimizedOrder(getVisitor);
    Assert.assertEquals(rows * columns, getVisitor.getCount());

    // write in row order, read back in optimized order (interior sub-range)
    m = new BlockRealMatrix(rows, columns);
    m.walkInRowOrder(new SetVisitor(), 1, rows - 2, 1, columns - 2);
    getVisitor = new GetVisitor();
    m.walkInOptimizedOrder(getVisitor, 1, rows - 2, 1, columns - 2);
    Assert.assertEquals((rows - 2) * (columns - 2), getVisitor.getCount());
    // border rows/columns must be untouched by the sub-range walk
    for (int i = 0; i < rows; ++i) {
      Assert.assertEquals(0.0, m.getEntry(i, 0), 0);
      Assert.assertEquals(0.0, m.getEntry(i, columns - 1), 0);
    }
    for (int j = 0; j < columns; ++j) {
      Assert.assertEquals(0.0, m.getEntry(0, j), 0);
      Assert.assertEquals(0.0, m.getEntry(rows - 1, j), 0);
    }

    // write in column order, read back in optimized order (full matrix)
    m = new BlockRealMatrix(rows, columns);
    m.walkInColumnOrder(new SetVisitor());
    getVisitor = new GetVisitor();
    m.walkInOptimizedOrder(getVisitor);
    Assert.assertEquals(rows * columns, getVisitor.getCount());

    // write in column order, read back in optimized order (interior sub-range)
    m = new BlockRealMatrix(rows, columns);
    m.walkInColumnOrder(new SetVisitor(), 1, rows - 2, 1, columns - 2);
    getVisitor = new GetVisitor();
    m.walkInOptimizedOrder(getVisitor, 1, rows - 2, 1, columns - 2);
    Assert.assertEquals((rows - 2) * (columns - 2), getVisitor.getCount());
    for (int i = 0; i < rows; ++i) {
      Assert.assertEquals(0.0, m.getEntry(i, 0), 0);
      Assert.assertEquals(0.0, m.getEntry(i, columns - 1), 0);
    }
    for (int j = 0; j < columns; ++j) {
      Assert.assertEquals(0.0, m.getEntry(0, j), 0);
      Assert.assertEquals(0.0, m.getEntry(rows - 1, j), 0);
    }

    // write in optimized order, read back in row order (full matrix)
    m = new BlockRealMatrix(rows, columns);
    m.walkInOptimizedOrder(new SetVisitor());
    getVisitor = new GetVisitor();
    m.walkInRowOrder(getVisitor);
    Assert.assertEquals(rows * columns, getVisitor.getCount());

    // write in optimized order, read back in row order (interior sub-range)
    m = new BlockRealMatrix(rows, columns);
    m.walkInOptimizedOrder(new SetVisitor(), 1, rows - 2, 1, columns - 2);
    getVisitor = new GetVisitor();
    m.walkInRowOrder(getVisitor, 1, rows - 2, 1, columns - 2);
    Assert.assertEquals((rows - 2) * (columns - 2), getVisitor.getCount());
    for (int i = 0; i < rows; ++i) {
      Assert.assertEquals(0.0, m.getEntry(i, 0), 0);
      Assert.assertEquals(0.0, m.getEntry(i, columns - 1), 0);
    }
    for (int j = 0; j < columns; ++j) {
      Assert.assertEquals(0.0, m.getEntry(0, j), 0);
      Assert.assertEquals(0.0, m.getEntry(rows - 1, j), 0);
    }

    // write in optimized order, read back in column order (full matrix)
    m = new BlockRealMatrix(rows, columns);
    m.walkInOptimizedOrder(new SetVisitor());
    getVisitor = new GetVisitor();
    m.walkInColumnOrder(getVisitor);
    Assert.assertEquals(rows * columns, getVisitor.getCount());

    // write in optimized order, read back in column order (interior sub-range)
    m = new BlockRealMatrix(rows, columns);
    m.walkInOptimizedOrder(new SetVisitor(), 1, rows - 2, 1, columns - 2);
    getVisitor = new GetVisitor();
    m.walkInColumnOrder(getVisitor, 1, rows - 2, 1, columns - 2);
    Assert.assertEquals((rows - 2) * (columns - 2), getVisitor.getCount());
    for (int i = 0; i < rows; ++i) {
      Assert.assertEquals(0.0, m.getEntry(i, 0), 0);
      Assert.assertEquals(0.0, m.getEntry(i, columns - 1), 0);
    }
    for (int j = 0; j < columns; ++j) {
      Assert.assertEquals(0.0, m.getEntry(0, j), 0);
      Assert.assertEquals(0.0, m.getEntry(rows - 1, j), 0);
    }
  }

  /** Round-trips a matrix through Java serialization and checks equality. */
  @Test
  public void testSerial()  {
    BlockRealMatrix m = new BlockRealMatrix(testData);
    Assert.assertEquals(m,TestUtils.serializeAndRecover(m));
  }

  /** Changing visitor that writes the deterministic value i + j/1024 into each cell. */
  private static class SetVisitor extends DefaultRealMatrixChangingVisitor {
    @Override
    public double visit(int i, int j, double value) {
      return i + j / 1024.0;
    }
  }

  /**
   * Preserving visitor that asserts each visited cell holds the value written
   * by {@link SetVisitor} and counts the number of cells visited.
   */
  private static class GetVisitor extends DefaultRealMatrixPreservingVisitor {
    // number of cells visited so far
    private int count = 0;
    @Override
    public void visit(int i, int j, double value) {
      ++count;
      Assert.assertEquals(i + j / 1024.0, value, 0.0);
    }
    /** Returns how many cells have been visited. */
    public int getCount() {
      return count;
    }
  }

  //--------------- -----------------Protected methods

  /** verifies that two matrices are close (1-norm) */
  protected void assertClose(RealMatrix m, RealMatrix n, double tolerance) {
    Assert.assertTrue(m.subtract(n).getNorm() < tolerance);
  }

  /** verifies that two vectors are close (sup norm) */
  protected void assertClose(double[] m, double[] n, double tolerance) {
    if (m.length != n.length) {
      Assert.fail("vectors not same length");
    }
    for (int i = 0; i < m.length; i++) {
      Assert.assertEquals(m[i], n[i], tolerance);
    }
  }

  /** Builds a rows x columns matrix with entries uniformly drawn from [-100, 100). */
  private BlockRealMatrix createRandomMatrix(Random r, int rows, int columns) {
    BlockRealMatrix m = new BlockRealMatrix(rows, columns);
    for (int i = 0; i < rows; ++i) {
      for (int j = 0; j < columns; ++j) {
        m.setEntry(i, j, 200 * r.nextDouble() - 100);
      }
    }
    return m;
  }
}
/*
 * EDACCResultsBrowserColumnSelection.java
 *
 * Created on 10.05.2010, 15:44:03
 */
package edacc;

import edacc.experiment.ExperimentResultsBrowserTableModel;
import edacc.experiment.TableColumnSelector;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import javax.swing.JCheckBox;
import javax.swing.JPanel;
import org.jdesktop.application.Action;

/**
 * Modal dialog that lets the user choose which columns of the experiment
 * results browser table are visible.  One checkbox is created per column;
 * columns below {@code ExperimentResultsBrowserTableModel.COL_PROPERTY} go
 * into the "basic columns" panel, the rest into the "result property columns"
 * panel.  Pressing the select button pushes the chosen visibility back into
 * the {@link TableColumnSelector}.
 *
 * @author simon
 */
public class EDACCResultsBrowserColumnSelection extends javax.swing.JDialog {

    // receives the final column visibility when the user confirms
    private TableColumnSelector selector;
    // one checkbox per table column, index-aligned with selector's columns
    private JCheckBox[] checkboxes;
    ExperimentResultsBrowserTableModel model;

    /** Creates new form EDACCResultsBrowserColumnSelection */
    public EDACCResultsBrowserColumnSelection(java.awt.Frame parent, boolean modal, TableColumnSelector selector, ExperimentResultsBrowserTableModel model) {
        super(parent, modal);
        this.model = model;
        this.selector = selector;
        initComponents();

        // Build one checkbox per column, pre-checked with the current visibility.
        boolean[] visibility = selector.getColumnVisibility();
        checkboxes = new JCheckBox[visibility.length];

        // Replace the generated GroupLayouts with single-column GridBagLayouts
        // so the checkboxes stack vertically and stretch horizontally.
        pnlBasicColumns.setLayout(new GridBagLayout());
        GridBagConstraints basicColumnsConstraints = new GridBagConstraints();
        basicColumnsConstraints.gridx = 0;
        basicColumnsConstraints.weightx = 1;
        basicColumnsConstraints.fill = GridBagConstraints.HORIZONTAL;
        basicColumnsConstraints.gridy = 0;
        basicColumnsConstraints.gridheight = 1;
        basicColumnsConstraints.gridwidth = 1;
        basicColumnsConstraints.anchor = GridBagConstraints.WEST;

        GridBagConstraints resultPropertyColumnsConstraints = new GridBagConstraints();
        resultPropertyColumnsConstraints.gridx = 0;
        resultPropertyColumnsConstraints.weightx = 1;
        resultPropertyColumnsConstraints.fill = GridBagConstraints.HORIZONTAL;
        resultPropertyColumnsConstraints.gridy = 0;
        resultPropertyColumnsConstraints.gridheight = 1;
        resultPropertyColumnsConstraints.gridwidth = 1;
        resultPropertyColumnsConstraints.anchor = GridBagConstraints.WEST;
        pnlResultPropertyColumns.setLayout(new GridBagLayout());

        for (int i = 0; i < checkboxes.length; i++) {
            checkboxes[i] = new JCheckBox(selector.getColumnName(i), visibility[i]);
            checkboxes[i].setVisible(true);
            // columns before COL_PROPERTY are "basic"; the rest are result properties
            if (i < ExperimentResultsBrowserTableModel.COL_PROPERTY) {
                pnlBasicColumns.add(checkboxes[i], basicColumnsConstraints);
                basicColumnsConstraints.gridy++;
            } else {
                pnlResultPropertyColumns.add(checkboxes[i], resultPropertyColumnsConstraints);
                resultPropertyColumnsConstraints.gridy++;
            }
        }
        // size the scrollable panel to fit both checkbox panels
        jPanel1.setPreferredSize(new Dimension(0, pnlBasicColumns.getPreferredSize().height + pnlResultPropertyColumns.getPreferredSize().height));
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;

        jScrollPane1 = new javax.swing.JScrollPane();
        jPanel1 = new javax.swing.JPanel();
        pnlBasicColumns = new javax.swing.JPanel();
        pnlResultPropertyColumns = new javax.swing.JPanel();
        jPanel2 = new javax.swing.JPanel();
        jButton1 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        org.jdesktop.application.ResourceMap resourceMap = org.jdesktop.application.Application.getInstance(edacc.EDACCApp.class).getContext().getResourceMap(EDACCResultsBrowserColumnSelection.class);
        setTitle(resourceMap.getString("Form.title")); // NOI18N
        setName("Form"); // NOI18N

        jScrollPane1.setMaximumSize(new java.awt.Dimension(32767, 100));
        jScrollPane1.setName("jScrollPane1"); // NOI18N
        jScrollPane1.getVerticalScrollBar().setUnitIncrement(30);

        jPanel1.setMaximumSize(new java.awt.Dimension(32767, 100));
        jPanel1.setName("jPanel1"); // NOI18N
        jPanel1.setPreferredSize(new java.awt.Dimension(0, 300));
        jPanel1.setLayout(new java.awt.GridBagLayout());

        pnlBasicColumns.setBorder(javax.swing.BorderFactory.createTitledBorder(resourceMap.getString("pnlBasicColumns.border.title"))); // NOI18N
        pnlBasicColumns.setName("pnlBasicColumns"); // NOI18N

        javax.swing.GroupLayout pnlBasicColumnsLayout = new javax.swing.GroupLayout(pnlBasicColumns);
        pnlBasicColumns.setLayout(pnlBasicColumnsLayout);
        pnlBasicColumnsLayout.setHorizontalGroup(
            pnlBasicColumnsLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 301, Short.MAX_VALUE)
        );
        pnlBasicColumnsLayout.setVerticalGroup(
            pnlBasicColumnsLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 0, Short.MAX_VALUE)
        );

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(pnlBasicColumns, gridBagConstraints);

        pnlResultPropertyColumns.setBorder(javax.swing.BorderFactory.createTitledBorder(resourceMap.getString("pnlResultPropertyColumns.border.title"))); // NOI18N
        pnlResultPropertyColumns.setName("pnlResultPropertyColumns"); // NOI18N

        javax.swing.GroupLayout pnlResultPropertyColumnsLayout = new javax.swing.GroupLayout(pnlResultPropertyColumns);
        pnlResultPropertyColumns.setLayout(pnlResultPropertyColumnsLayout);
        pnlResultPropertyColumnsLayout.setHorizontalGroup(
            pnlResultPropertyColumnsLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 301, Short.MAX_VALUE)
        );
        pnlResultPropertyColumnsLayout.setVerticalGroup(
            pnlResultPropertyColumnsLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 0, Short.MAX_VALUE)
        );

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(pnlResultPropertyColumns, gridBagConstraints);

        jScrollPane1.setViewportView(jPanel1);

        getContentPane().add(jScrollPane1, java.awt.BorderLayout.CENTER);

        jPanel2.setName("jPanel2"); // NOI18N
        jPanel2.setPreferredSize(new java.awt.Dimension(399, 50));

        javax.swing.ActionMap actionMap = org.jdesktop.application.Application.getInstance(edacc.EDACCApp.class).getContext().getActionMap(EDACCResultsBrowserColumnSelection.class, this);
        jButton1.setAction(actionMap.get("btnSelect")); // NOI18N
        jButton1.setText(resourceMap.getString("jButton1.text")); // NOI18N
        jButton1.setName("jButton1"); // NOI18N

        jButton2.setAction(actionMap.get("btnAbort")); // NOI18N
        jButton2.setText(resourceMap.getString("jButton2.text")); // NOI18N
        jButton2.setName("jButton2"); // NOI18N

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 76, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 160, Short.MAX_VALUE)
                .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 76, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                .addContainerGap(16, Short.MAX_VALUE)
                .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jButton1)
                    .addComponent(jButton2))
                .addContainerGap())
        );

        getContentPane().add(jPanel2, java.awt.BorderLayout.SOUTH);

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Action for the select button: reads the checked state of every checkbox
     * and hands the resulting visibility array to the selector, then closes
     * the dialog.  (Note: the method name on TableColumnSelector is spelled
     * "setColumnVisiblity" in that class's API.)
     */
    @Action
    public void btnSelect() {
        boolean[] visibility = new boolean[checkboxes.length];
        for (int i = 0; i < visibility.length; i++) {
            visibility[i] = checkboxes[i].isSelected();
        }
        // model.setColumnVisibility(visibility, true);
        selector.setColumnVisiblity(visibility);
        this.dispose();
    }

    /** Action for the abort button: closes the dialog without applying changes. */
    @Action
    public void btnAbort() {
        this.dispose();
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JPanel pnlBasicColumns;
    private javax.swing.JPanel pnlResultPropertyColumns;
    // End of variables declaration//GEN-END:variables
}
/*
    Copyright 2009 Semantic Discovery, Inc. (www.semanticdiscovery.com)

    This file is part of the Semantic Discovery Toolkit.

    The Semantic Discovery Toolkit is free software: you can redistribute it
    and/or modify it under the terms of the GNU Lesser General Public License
    as published by the Free Software Foundation, either version 3 of the
    License, or (at your option) any later version.

    The Semantic Discovery Toolkit is distributed in the hope that it will be
    useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
    General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with The Semantic Discovery Toolkit.  If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.xml;

import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Map;

/**
 * Abstract base implementation of the tag stack interface.
 * <p>
 * Subclasses supply the ordered tag list via {@link #getTagsList} (root first,
 * deepest last); this class implements searching, path-key construction and
 * stack comparison on top of it.
 *
 * @author Spence Koehler
 */
public abstract class BaseTagStack implements TagStack {

  // cached full path key (lazily built by getPathKey()); reset via clearPathKey()
  private String _pathKey = null;
  // tags optionally carried along with this stack (may be null)
  private List<XmlLite.Tag> savedTags;
  // when true, tag comparison uses equals() + child number instead of identity
  private boolean useTagEquivalents = false;

  /**
   * Get a handle on the instance's tags list ordered from the (top) root
   * to the bottom.
   */
  protected abstract List<XmlLite.Tag> getTagsList();

  protected BaseTagStack() {
    this(false);
  }

  protected BaseTagStack(boolean useTagEquivalents) {
    this.useTagEquivalents = useTagEquivalents;
  }

  protected BaseTagStack(List<XmlLite.Tag> savedTags) {
    this(savedTags, false);
  }

  protected BaseTagStack(List<XmlLite.Tag> savedTags, boolean useTagEquivalents) {
    this.savedTags = savedTags;
    this.useTagEquivalents = useTagEquivalents;
  }

  /**
   * Get the tag at the given index or null if the index is out of range.
   */
  public XmlLite.Tag getTag(int index) {
    XmlLite.Tag result = null;
    final List<XmlLite.Tag> tagsList = getTagsList();
    if (index >= 0 && index < tagsList.size()) {
      result = tagsList.get(index);
    }
    return result;
  }

  /**
   * Get the names of the tags in this stack in the form t1.t2....tN, where
   * t1 is the root (top) of the stack and tN is the bottom of the stack.
   * <p>
   * The result is computed once and cached until {@link #clearPathKey} is
   * called.
   */
  public String getPathKey() {
    if (_pathKey == null) {
      final PathKeyBuilder result = new PathKeyBuilder();
      result.addAll(getTagsList());
      _pathKey = result.getPathKey();
    }
    return _pathKey;
  }

  /**
   * Get the names of the tags in this stack from the root (at index 0) down
   * to but not including the given index.
   */
  public String getPathKey(int index) {
    return getPathKey(index, false);
  }

  /**
   * Get the names of the tags in this stack from the root (at index 0) down
   * to but not including the given index.
   *
   * @param index    exclusive end index of the tags to include.
   * @param useIndex passed through to the PathKeyBuilder.
   */
  public String getPathKey(int index, boolean useIndex) {
    // BUGFIX: this variant previously read and wrote the shared _pathKey
    // cache, so after any getPathKey() or getPathKey(k) call, a call with a
    // different index (or useIndex flag) returned the stale cached key
    // instead of the key for the requested prefix.  Compute fresh instead;
    // only the full-path getPathKey() remains cached.
    final PathKeyBuilder result = new PathKeyBuilder(useIndex);
    for (int i = 0; i < index; ++i) {
      final XmlLite.Tag tag = getTag(i);
      if (tag != null) result.add(tag);
    }
    return result.getPathKey();
  }

  /**
   * Get this stack's current depth.
   */
  public int depth() {
    return getTagsList().size();
  }

  /**
   * Determine the position at which the current stack has the given tag name.
   *
   * @param tagName an already lowercased tag name.
   *
   * @return the position of the tag name in the stack (where 0 is 'root')
   *         or -1.
   */
  public int hasTag(String tagName) {
    if (tagName == null) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag tag : tags) {
      if (tagName.equals(tag.name)) break;
      ++result;
    }
    // if we walked off the end, the tag was not found
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Determine the position at which the current stack has any of the given
   * tag names.
   *
   * @param tagNames already lowercased tag names.
   *
   * @return the position of the first matching tag name in the stack (where
   *         0 is 'root') or -1.
   */
  public int hasTag(Set<String> tagNames) {
    if (tagNames == null) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag tag : tags) {
      if (tagNames.contains(tag.name)) break;
      ++result;
    }
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Determine the position at which the current stack has the given tag
   * instance (identity comparison).
   *
   * @param tag the tag instance to locate.
   *
   * @return the position of the tag in the stack (where 0 is 'root') or -1.
   */
  public int hasTag(XmlLite.Tag tag) {
    if (tag == null) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag curTag : tags) {
      if (curTag == tag) break;
      ++result;
    }
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Determine the position at which the current stack has any tag which has
   * an attribute with the specified value.
   *
   * @param name attribute name
   * @param value attribute value
   * @return the position of the tag in the stack (where 0 is 'root') or -1.
   */
  public int hasTagAttribute(String name, String value) {
    if (name == null || value == null) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag tag : tags) {
      String val = tag.getAttribute(name);
      if (value.equals(val)) break;
      ++result;
    }
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Determine the position at which the current stack has any tag which has
   * an attribute with any of the specified values.
   *
   * @param name attribute name
   * @param values set of attribute values
   * @return the position of the tag in the stack (where 0 is 'root') or -1.
   */
  public int hasTagAttribute(String name, Set<String> values) {
    if (name == null || values == null || values.size() == 0) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag tag : tags) {
      String val = tag.getAttribute(name);
      if (values.contains(val)) break;
      ++result;
    }
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Determine the position at which the current stack has any tag which has
   * any of the specified attributes with any of the specified values.
   *
   * @param attrs a map of valid attribute names and the attribute values they may contain
   * @return the position of the tag in the stack (where 0 is 'root') or -1.
   */
  public int hasTagAttribute(Map<String, Set<String>> attrs) {
    if (attrs.size() == 0) return -1;

    int result = 0;
    final List<XmlLite.Tag> tags = getTagsList();
    for (XmlLite.Tag tag : tags) {
      boolean found = false;
      for (Map.Entry<String, Set<String>> entry : attrs.entrySet()) {
        String name = entry.getKey();
        Set<String> values = entry.getValue();
        String val = tag.getAttribute(name);
        if (values.contains(val)) {
          found = true;
          break;
        }
      }
      if (found) break;
      ++result;
    }
    return result >= tags.size() ? -1 : result;
  }

  /**
   * Compare two tags: identity by default; when useTagEquivalents is set,
   * equal tags with the same child number are also considered equivalent.
   */
  private boolean equivalentTags(XmlLite.Tag a, XmlLite.Tag b) {
    if (useTagEquivalents) {
      if (a == b) return true;
      else if (a == null && b != null) return false;

      boolean result = false;
      if (a != null && a.equals(b)) result = a.getChildNum() == b.getChildNum();
      return result;
    }
    else return a == b;
  }

  /**
   * Find the deepest tag this stack has in common with the other.
   * <p>
   * NOTE(review): assumes {@code other} is a BaseTagStack (cast below) —
   * holds for all current implementations; confirm if new TagStack
   * implementations are added.
   */
  public XmlLite.Tag getDeepestCommonTag(TagStack other) {
    if (other == null) return null;

    final Iterator<XmlLite.Tag> myTagsIter = getTagsList().iterator();
    final Iterator<XmlLite.Tag> otherTagsIter = ((BaseTagStack)other).getTagsList().iterator();

    XmlLite.Tag result = null;

    while (myTagsIter.hasNext() && otherTagsIter.hasNext()) {
      final XmlLite.Tag myTag = myTagsIter.next();
      final XmlLite.Tag otherTag = otherTagsIter.next();

      if (!equivalentTags(myTag, otherTag)) {
        break;
      }
      else {
        result = myTag;
      }
    }

    return result;
  }

  /**
   * Find the index of the first divergent tag between this and the other stack.
   *
   * @return the index or -1 if the stacks don't intersect or depth() if they
   *         don't diverge.
   */
  public int findFirstDivergentTag(TagStack other) {
    if (other == null) return -1;

    final Iterator<XmlLite.Tag> myTagsIter = getTagsList().iterator();
    final Iterator<XmlLite.Tag> otherTagsIter = ((BaseTagStack)other).getTagsList().iterator();

    int result = -1;
    boolean match = true;
    while (myTagsIter.hasNext() && otherTagsIter.hasNext()) {
      final XmlLite.Tag myTag = myTagsIter.next();
      final XmlLite.Tag otherTag = otherTagsIter.next();
      ++result;
      if (!equivalentTags(myTag, otherTag)) {
        match = false;
        break;
      }
    }
    // if we run out of tags, these are common blocks
    if (match) result++;

    return result;
  }

  /**
   * Find the deepest index of the tag in this stack.
   *
   * @param tagName the already lowercased tag name to find.
   *
   * @return the deepest position of the tag name in the stack (where 0 is
   *         'root') or -1.
   */
  public int findDeepestTag(String tagName) {
    int result = -1;

    if (tagName != null) {
      final List<XmlLite.Tag> tags = getTagsList();
      for (result = tags.size() - 1; result >= 0; --result) {
        if (tagName.equals(tags.get(result).name)) {
          break;
        }
      }
    }

    return result;
  }

  /**
   * Find the deepest index of the tag in this stack which has the specified
   * attribute name.
   *
   * @param tagName the already lowercased tag name to find.
   * @param attr attribute name
   * @return the deepest position of the tag name in the stack (where 0 is
   *         'root') or -1.
   */
  public int findDeepestTag(String tagName, String attr) {
    int result = -1;

    if (tagName != null) {
      final List<XmlLite.Tag> tags = getTagsList();
      for (result = tags.size() - 1; result >= 0; --result) {
        if (tagName.equals(tags.get(result).name) &&
            tags.get(result).getAttribute(attr) != null) {
          break;
        }
      }
    }

    return result;
  }

  /**
   * Find the deepest index of any of the tags in the specified set in this stack.
   *
   * @param tagNames the set of already lowercased tag names to find.
   * @return the deepest position of the first matching tag name in the stack
   *         (where 0 is 'root') or -1.
   */
  public int findDeepestTag(Set<String> tagNames) {
    int result = -1;

    if (tagNames != null && tagNames.size() > 0) {
      final List<XmlLite.Tag> tags = getTagsList();
      for (result = tags.size() - 1; result >= 0; --result) {
        if (tagNames.contains(tags.get(result).name)) {
          break;
        }
      }
    }

    return result;
  }

  /**
   * Find the deepest index of any of the tags in this stack which have the
   * specified attribute name.
   *
   * @param tagNames the set of already lowercased tag names to find.
   * @param attr attribute name
   * @return the deepest position of the tag name in the stack (where 0 is
   *         'root') or -1.
   */
  public int findDeepestTag(Set<String> tagNames, String attr) {
    int result = -1;

    if (tagNames != null && tagNames.size() > 0) {
      final List<XmlLite.Tag> tags = getTagsList();
      for (result = tags.size() - 1; result >= 0; --result) {
        if (tagNames.contains(tags.get(result).name) &&
            tags.get(result).getAttribute(attr) != null) {
          break;
        }
      }
    }

    return result;
  }

  /**
   * Get tags that have been saved with this tagstack.
   */
  public List<XmlLite.Tag> getSavedTags() {
    return savedTags;
  }

  /**
   * Get a string representation of this tag stack (its full path key).
   */
  public String toString() {
    return getPathKey();
  }

  /** Invalidate the cached full path key (e.g. after the tag list changes). */
  protected void clearPathKey() {
    _pathKey = null;
  }
}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2013, Red Hat, Inc. and/or its affiliates, and individual
 * contributors by the @authors tag. See the copyright.txt in the
 * distribution for a full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.as.quickstarts.datagrid.carmart.session;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;

import javax.enterprise.inject.Model;
import javax.inject.Inject;
import javax.transaction.TransactionManager;

import org.infinispan.Cache;
import org.infinispan.commons.api.BasicCache;
import org.jboss.as.quickstarts.datagrid.carmart.model.Car;

/**
 * Adds, retrieves, removes new cars from the cache. Also returns a list of cars
 * stored in the cache.  Every cache operation runs inside a JTA transaction;
 * on failure the transaction is rolled back and the failure is logged (the
 * methods deliberately do not propagate exceptions — this is demo/quickstart
 * code that always returns a navigation outcome).
 *
 * @author Martin Gencur
 */
@Model
public class CarManager {

    private Logger log = Logger.getLogger(this.getClass().getName());

    /** Name of the Infinispan cache holding cars and the number-plate index. */
    public static final String CACHE_NAME = "carcache";

    /** Cache key under which the list of all number plates is stored. */
    public static final String CAR_NUMBERS_KEY = "carnumbers";

    @Inject
    private CacheContainerProvider provider;

    // transaction manager of the cache currently in use (set per operation)
    private TransactionManager tm;

    private BasicCache<String, Object> carCache;

    private String carId;

    // backing bean for the "add car" form
    private Car car = new Car();

    public CarManager() {
    }

    /**
     * Stores the current {@link #car} in the cache (both the car entry itself
     * and its number plate in the plate index) within a single transaction.
     *
     * @return the "home" navigation outcome (even on failure).
     */
    public String addNewCar() {
        carCache = provider.getCacheContainer().getCache(CACHE_NAME);
        tm = getTransactionManager(carCache);
        try {
            tm.begin();
            List<String> carNumbers = getNumberPlateList(carCache);
            carNumbers.add(car.getNumberPlate());
            carCache.put(CAR_NUMBERS_KEY, carNumbers);
            carCache.put(CarManager.encode(car.getNumberPlate()), car);
            tm.commit();
        } catch (Exception e) {
            // previously swallowed silently; roll back and log instead
            rollback(e);
        }
        return "home";
    }

    /**
     * Demonstrates transaction rollback: stores the updated number-plate list,
     * then throws before the car entry is written.  After rollback the list
     * update must not be visible in the cache.
     *
     * @return the "home" navigation outcome.
     */
    public String addNewCarWithRollback() {
        boolean throwInducedException = true;
        carCache = provider.getCacheContainer().getCache(CACHE_NAME);
        tm = getTransactionManager(carCache);
        try {
            tm.begin();
            List<String> carNumbers = getNumberPlateList(carCache);
            carNumbers.add(car.getNumberPlate());
            //store the new list of car numbers and then throw an exception -> roll-back
            //the car number list should not be stored in the cache
            carCache.put(CAR_NUMBERS_KEY, carNumbers);
            if (throwInducedException)
                throw new RuntimeException("Induced exception");
            carCache.put(CarManager.encode(car.getNumberPlate()), car);
            tm.commit();
        } catch (Exception e) {
            rollback(e);
        }
        return "home";
    }

    /**
     * Operate on a clone of car number list so that we can demonstrate transaction roll-back.
     */
    @SuppressWarnings("unchecked")
    private List<String> getNumberPlateList(BasicCache<String, Object> carCacheLoc) {
        List<String> result = null;
        List<String> carNumberList = (List<String>) carCacheLoc.get(CAR_NUMBERS_KEY);
        if (carNumberList == null) {
            result = new LinkedList<String>();
        } else {
            result = new LinkedList<String>(carNumberList);
        }
        return result;
    }

    /**
     * Loads the car stored under the given number plate into {@link #car}.
     *
     * @param numberPlate plain (unencoded) number plate.
     * @return the "showdetails" navigation outcome.
     */
    public String showCarDetails(String numberPlate) {
        carCache = provider.getCacheContainer().getCache(CACHE_NAME);
        tm = getTransactionManager(carCache);
        try {
            tm.begin();
            this.car = (Car) carCache.get(encode(numberPlate));
            tm.commit();
        } catch (Exception e) {
            rollback(e);
        }
        return "showdetails";
    }

    /**
     * @return the list of all stored number plates, or null if the
     *         transaction failed.
     */
    public List<String> getCarList() {
        List<String> result = null;
        carCache = provider.getCacheContainer().getCache(CACHE_NAME);
        tm = getTransactionManager(carCache);
        try {
            tm.begin();
            // retrieve a list of number plates from the cache
            result = getNumberPlateList(carCache);
            tm.commit();
        } catch (Exception e) {
            rollback(e);
        }
        return result;
    }

    /**
     * Removes the car with the given number plate and drops the plate from
     * the plate index, in one transaction.
     *
     * @param numberPlate plain (unencoded) number plate.
     * @return null (stay on the current view).
     */
    public String removeCar(String numberPlate) {
        carCache = provider.getCacheContainer().getCache(CACHE_NAME);
        tm = getTransactionManager(carCache);
        try {
            tm.begin();
            carCache.remove(encode(numberPlate));
            List<String> carNumbers = getNumberPlateList(carCache);
            carNumbers.remove(numberPlate);
            carCache.put(CAR_NUMBERS_KEY, carNumbers);
            tm.commit();
        } catch (Exception e) {
            rollback(e);
        }
        return null;
    }

    /**
     * Rolls back the current transaction after a failure, logging the cause.
     * Previously several call sites swallowed both the failure and any
     * rollback failure without a trace; all transactional methods now share
     * this handler (same logging as addNewCarWithRollback used).
     */
    private void rollback(Exception cause) {
        if (tm != null) {
            try {
                tm.rollback();
                log.info("Rolled back due to: " + cause.getMessage());
            } catch (Exception rollbackFailure) {
                // nothing more we can do, but at least leave a trace
                log.warning("Rollback failed: " + rollbackFailure.getMessage());
            }
        }
    }

    /**
     * Obtains the JTA transaction manager of the given cache.  BasicCache does
     * not expose transactions, so this casts to the embedded Cache interface.
     * (The previous version declared a local variable shadowing the {@code tm}
     * field and used a raw {@code Cache} cast.)
     */
    private TransactionManager getTransactionManager(BasicCache<?, ?> cache) {
        return ((Cache<?, ?>) cache).getAdvancedCache().getTransactionManager();
    }

    public void setCarId(String carId) {
        this.carId = carId;
    }

    public String getCarId() {
        return carId;
    }

    public void setCar(Car car) {
        this.car = car;
    }

    public Car getCar() {
        return car;
    }

    /** URL-encodes a number plate so it is safe to use as a cache key / URL part. */
    public static String encode(String key) {
        try {
            return URLEncoder.encode(key, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JVM spec; this cannot happen in practice
            throw new RuntimeException(e);
        }
    }

    /** Inverse of {@link #encode}. */
    public static String decode(String key) {
        try {
            return URLDecoder.decode(key, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gaecompat.repackaged.com.google.common.primitives; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkArgument; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkElementIndex; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkNotNull; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkPositionIndexes; import com.gaecompat.repackaged.com.google.common.annotations.GwtCompatible; import java.io.Serializable; import java.util.AbstractList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.RandomAccess; /** * Static utility methods pertaining to {@code byte} primitives, that are not * already found in either {@link Byte} or {@link Arrays}, <i>and interpret * bytes as neither signed nor unsigned</i>. The methods which specifically * treat bytes as signed or unsigned are found in {@link SignedBytes} and {@link * UnsignedBytes}. * * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/PrimitivesExplained"> * primitive utilities</a>. * * @author Kevin Bourrillion * @since 1.0 */ // TODO(kevinb): how to prevent warning on UnsignedBytes when building GWT // javadoc? 
@GwtCompatible public final class Bytes { private Bytes() {} /** * Returns a hash code for {@code value}; equal to the result of invoking * {@code ((Byte) value).hashCode()}. * * @param value a primitive {@code byte} value * @return a hash code for the value */ public static int hashCode(byte value) { return value; } /** * Returns {@code true} if {@code target} is present as an element anywhere in * {@code array}. * * @param array an array of {@code byte} values, possibly empty * @param target a primitive {@code byte} value * @return {@code true} if {@code array[i] == target} for some value of {@code * i} */ public static boolean contains(byte[] array, byte target) { for (byte value : array) { if (value == target) { return true; } } return false; } /** * Returns the index of the first appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code byte} values, possibly empty * @param target a primitive {@code byte} value * @return the least index {@code i} for which {@code array[i] == target}, or * {@code -1} if no such index exists. */ public static int indexOf(byte[] array, byte target) { return indexOf(array, target, 0, array.length); } // TODO(kevinb): consider making this public private static int indexOf( byte[] array, byte target, int start, int end) { for (int i = start; i < end; i++) { if (array[i] == target) { return i; } } return -1; } /** * Returns the start position of the first occurrence of the specified {@code * target} within {@code array}, or {@code -1} if there is no such occurrence. * * <p>More formally, returns the lowest index {@code i} such that {@code * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly * the same elements as {@code target}. 
* * @param array the array to search for the sequence {@code target} * @param target the array to search for as a sub-sequence of {@code array} */ public static int indexOf(byte[] array, byte[] target) { checkNotNull(array, "array"); checkNotNull(target, "target"); if (target.length == 0) { return 0; } outer: for (int i = 0; i < array.length - target.length + 1; i++) { for (int j = 0; j < target.length; j++) { if (array[i + j] != target[j]) { continue outer; } } return i; } return -1; } /** * Returns the index of the last appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code byte} values, possibly empty * @param target a primitive {@code byte} value * @return the greatest index {@code i} for which {@code array[i] == target}, * or {@code -1} if no such index exists. */ public static int lastIndexOf(byte[] array, byte target) { return lastIndexOf(array, target, 0, array.length); } // TODO(kevinb): consider making this public private static int lastIndexOf( byte[] array, byte target, int start, int end) { for (int i = end - 1; i >= start; i--) { if (array[i] == target) { return i; } } return -1; } /** * Returns the values from each provided array combined into a single array. * For example, {@code concat(new byte[] {a, b}, new byte[] {}, new * byte[] {c}} returns the array {@code {a, b, c}}. * * @param arrays zero or more {@code byte} arrays * @return a single array containing all the values from the source arrays, in * order */ public static byte[] concat(byte[]... arrays) { int length = 0; for (byte[] array : arrays) { length += array.length; } byte[] result = new byte[length]; int pos = 0; for (byte[] array : arrays) { System.arraycopy(array, 0, result, pos, array.length); pos += array.length; } return result; } /** * Returns an array containing the same values as {@code array}, but * guaranteed to be of a specified minimum length. 
If {@code array} already * has a length of at least {@code minLength}, it is returned directly. * Otherwise, a new array of size {@code minLength + padding} is returned, * containing the values of {@code array}, and zeroes in the remaining places. * * @param array the source array * @param minLength the minimum length the returned array must guarantee * @param padding an extra amount to "grow" the array by if growth is * necessary * @throws IllegalArgumentException if {@code minLength} or {@code padding} is * negative * @return an array containing the values of {@code array}, with guaranteed * minimum length {@code minLength} */ public static byte[] ensureCapacity( byte[] array, int minLength, int padding) { checkArgument(minLength >= 0, "Invalid minLength: %s", minLength); checkArgument(padding >= 0, "Invalid padding: %s", padding); return (array.length < minLength) ? copyOf(array, minLength + padding) : array; } // Arrays.copyOf() requires Java 6 private static byte[] copyOf(byte[] original, int length) { byte[] copy = new byte[length]; System.arraycopy(original, 0, copy, 0, Math.min(original.length, length)); return copy; } /** * Returns an array containing each value of {@code collection}, converted to * a {@code byte} value in the manner of {@link Number#byteValue}. * * <p>Elements are copied from the argument collection as if by {@code * collection.toArray()}. Calling this method is as thread-safe as calling * that method. * * @param collection a collection of {@code Number} instances * @return an array containing the same values as {@code collection}, in the * same order, converted to primitives * @throws NullPointerException if {@code collection} or any of its elements * is null * @since 1.0 (parameter was {@code Collection<Byte>} before 12.0) */ public static byte[] toArray(Collection<? 
extends Number> collection) { if (collection instanceof ByteArrayAsList) { return ((ByteArrayAsList) collection).toByteArray(); } Object[] boxedArray = collection.toArray(); int len = boxedArray.length; byte[] array = new byte[len]; for (int i = 0; i < len; i++) { // checkNotNull for GWT (do not optimize) array[i] = ((Number) checkNotNull(boxedArray[i])).byteValue(); } return array; } /** * Returns a fixed-size list backed by the specified array, similar to {@link * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, * but any attempt to set a value to {@code null} will result in a {@link * NullPointerException}. * * <p>The returned list maintains the values, but not the identities, of * {@code Byte} objects written to or read from it. For example, whether * {@code list.get(0) == list.get(0)} is true for the returned list is * unspecified. * * @param backingArray the array to back the list * @return a list view of the array */ public static List<Byte> asList(byte... 
backingArray) { if (backingArray.length == 0) { return Collections.emptyList(); } return new ByteArrayAsList(backingArray); } @GwtCompatible private static class ByteArrayAsList extends AbstractList<Byte> implements RandomAccess, Serializable { final byte[] array; final int start; final int end; ByteArrayAsList(byte[] array) { this(array, 0, array.length); } ByteArrayAsList(byte[] array, int start, int end) { this.array = array; this.start = start; this.end = end; } @Override public int size() { return end - start; } @Override public boolean isEmpty() { return false; } @Override public Byte get(int index) { checkElementIndex(index, size()); return array[start + index]; } @Override public boolean contains(Object target) { // Overridden to prevent a ton of boxing return (target instanceof Byte) && Bytes.indexOf(array, (Byte) target, start, end) != -1; } @Override public int indexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Byte) { int i = Bytes.indexOf(array, (Byte) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public int lastIndexOf(Object target) { // Overridden to prevent a ton of boxing if (target instanceof Byte) { int i = Bytes.lastIndexOf(array, (Byte) target, start, end); if (i >= 0) { return i - start; } } return -1; } @Override public Byte set(int index, Byte element) { checkElementIndex(index, size()); byte oldValue = array[start + index]; // checkNotNull for GWT (do not optimize) array[start + index] = checkNotNull(element); return oldValue; } @Override public List<Byte> subList(int fromIndex, int toIndex) { int size = size(); checkPositionIndexes(fromIndex, toIndex, size); if (fromIndex == toIndex) { return Collections.emptyList(); } return new ByteArrayAsList(array, start + fromIndex, start + toIndex); } @Override public boolean equals(Object object) { if (object == this) { return true; } if (object instanceof ByteArrayAsList) { ByteArrayAsList that = (ByteArrayAsList) object; int 
size = size(); if (that.size() != size) { return false; } for (int i = 0; i < size; i++) { if (array[start + i] != that.array[that.start + i]) { return false; } } return true; } return super.equals(object); } @Override public int hashCode() { int result = 1; for (int i = start; i < end; i++) { result = 31 * result + Bytes.hashCode(array[i]); } return result; } @Override public String toString() { StringBuilder builder = new StringBuilder(size() * 5); builder.append('[').append(array[start]); for (int i = start + 1; i < end; i++) { builder.append(", ").append(array[i]); } return builder.append(']').toString(); } byte[] toByteArray() { // Arrays.copyOfRange() is not available under GWT int size = size(); byte[] result = new byte[size]; System.arraycopy(array, start, result, 0, size); return result; } private static final long serialVersionUID = 0; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.tests.util.flink; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.configuration.UnmodifiableConfiguration; import org.apache.flink.tests.util.AutoClosableProcess; import org.apache.flink.tests.util.TestUtils; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import 
java.util.function.Consumer; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; /** * A wrapper around a Flink distribution. */ final class FlinkDistribution { private static final Logger LOG = LoggerFactory.getLogger(FlinkDistribution.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final Path opt; private final Path lib; private final Path conf; private final Path log; private final Path bin; private final Path plugins; private final Configuration defaultConfig; FlinkDistribution(Path distributionDir) { bin = distributionDir.resolve("bin"); opt = distributionDir.resolve("opt"); lib = distributionDir.resolve("lib"); conf = distributionDir.resolve("conf"); log = distributionDir.resolve("log"); plugins = distributionDir.resolve("plugins"); defaultConfig = new UnmodifiableConfiguration(GlobalConfiguration.loadConfiguration(conf.toAbsolutePath().toString())); } public void startJobManager() throws IOException { LOG.info("Starting Flink JobManager."); AutoClosableProcess.runBlocking(bin.resolve("jobmanager.sh").toAbsolutePath().toString(), "start"); } public void startTaskManager() throws IOException { LOG.info("Starting Flink TaskManager."); AutoClosableProcess.runBlocking(bin.resolve("taskmanager.sh").toAbsolutePath().toString(), "start"); } public void startFlinkCluster() throws IOException { LOG.info("Starting Flink cluster."); AutoClosableProcess.runBlocking(bin.resolve("start-cluster.sh").toAbsolutePath().toString()); final OkHttpClient client = new OkHttpClient(); final Request request = new Request.Builder() .get() .url("http://localhost:8081/taskmanagers") .build(); Exception reportedException = null; for (int retryAttempt = 0; retryAttempt < 30; retryAttempt++) { try (Response response = client.newCall(request).execute()) { if (response.isSuccessful()) { final String json = response.body().string(); final 
JsonNode taskManagerList = OBJECT_MAPPER.readTree(json) .get("taskmanagers"); if (taskManagerList != null && taskManagerList.size() > 0) { LOG.info("Dispatcher REST endpoint is up."); return; } } } catch (IOException ioe) { reportedException = ExceptionUtils.firstOrSuppressed(ioe, reportedException); } LOG.info("Waiting for dispatcher REST endpoint to come up..."); try { Thread.sleep(1000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); reportedException = ExceptionUtils.firstOrSuppressed(e, reportedException); } } throw new AssertionError("Dispatcher REST endpoint did not start in time.", reportedException); } public void stopFlinkCluster() throws IOException { LOG.info("Stopping Flink cluster."); AutoClosableProcess.runBlocking(bin.resolve("stop-cluster.sh").toAbsolutePath().toString()); } public JobID submitJob(final JobSubmission jobSubmission) throws IOException { final List<String> commands = new ArrayList<>(4); commands.add(bin.resolve("flink").toString()); commands.add("run"); if (jobSubmission.isDetached()) { commands.add("-d"); } if (jobSubmission.getParallelism() > 0) { commands.add("-p"); commands.add(String.valueOf(jobSubmission.getParallelism())); } commands.add(jobSubmission.getJar().toAbsolutePath().toString()); commands.addAll(jobSubmission.getArguments()); LOG.info("Running {}.", commands.stream().collect(Collectors.joining(" "))); final Pattern pattern = jobSubmission.isDetached() ? 
Pattern.compile("Job has been submitted with JobID (.*)") : Pattern.compile("Job with JobID (.*) has finished."); final CompletableFuture<String> rawJobIdFuture = new CompletableFuture<>(); final Consumer<String> stdoutProcessor = string -> { LOG.info(string); Matcher matcher = pattern.matcher(string); if (matcher.matches()) { rawJobIdFuture.complete(matcher.group(1)); } }; try (AutoClosableProcess flink = AutoClosableProcess.create(commands.toArray(new String[0])).setStdoutProcessor(stdoutProcessor).runNonBlocking()) { if (jobSubmission.isDetached()) { try { flink.getProcess().waitFor(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } try { return JobID.fromHexString(rawJobIdFuture.get(1, TimeUnit.MINUTES)); } catch (Exception e) { throw new IOException("Could not determine Job ID.", e); } } } public void submitSQLJob(SQLJobSubmission job) throws IOException { final List<String> commands = new ArrayList<>(); commands.add(bin.resolve("sql-client.sh").toAbsolutePath().toString()); commands.add("embedded"); job.getDefaultEnvFile().ifPresent(defaultEnvFile -> { commands.add("--defaults"); commands.add(defaultEnvFile); }); job.getSessionEnvFile().ifPresent(sessionEnvFile -> { commands.add("--environment"); commands.add(sessionEnvFile); }); for (String jar : job.getJars()) { commands.add("--jar"); commands.add(jar); } commands.add("--update"); commands.add("\"" + job.getSQL() + "\""); AutoClosableProcess.runBlocking(commands.toArray(new String[0])); } public void performJarOperation(JarOperation operation) throws IOException { final Path source = mapJarLocationToPath(operation.getSource()); final Path target = mapJarLocationToPath(operation.getTarget()); final Optional<Path> jarOptional; try (Stream<Path> files = Files.walk(source)) { jarOptional = files .filter(path -> path.getFileName().toString().startsWith(operation.getJarNamePrefix())) .findFirst(); } if (jarOptional.isPresent()) { final Path sourceJar = jarOptional.get(); final Path 
targetJar = target.resolve(operation.getJarNamePrefix()).resolve(sourceJar.getFileName()); Files.createDirectories(targetJar.getParent()); switch (operation.getOperationType()){ case COPY: Files.copy(sourceJar, targetJar); break; case MOVE: Files.move(sourceJar, targetJar); if (operation.getSource() == JarLocation.PLUGINS) { // plugin system crashes on startup if a plugin directory is empty Files.delete(sourceJar.getParent()); } break; default: throw new IllegalStateException(); } } else { throw new FileNotFoundException("No jar could be found matching the pattern " + operation.getJarNamePrefix() + "."); } } private Path mapJarLocationToPath(JarLocation location) { switch (location) { case LIB: return lib; case OPT: return opt; case PLUGINS: return plugins; default: throw new IllegalStateException(); } } public void appendConfiguration(Configuration config) throws IOException { final Configuration mergedConfig = new Configuration(); mergedConfig.addAll(defaultConfig); mergedConfig.addAll(config); final List<String> configurationLines = mergedConfig.toMap().entrySet().stream() .map(entry -> entry.getKey() + ": " + entry.getValue()) .collect(Collectors.toList()); Files.write(conf.resolve("flink-conf.yaml"), configurationLines); } public void setTaskExecutorHosts(Collection<String> taskExecutorHosts) throws IOException { Files.write(conf.resolve("slaves"), taskExecutorHosts); } public Stream<String> searchAllLogs(Pattern pattern, Function<Matcher, String> matchProcessor) throws IOException { final List<String> matches = new ArrayList<>(2); try (Stream<Path> logFilesStream = Files.list(log)) { final Iterator<Path> logFiles = logFilesStream.iterator(); while (logFiles.hasNext()) { final Path logFile = logFiles.next(); if (!logFile.getFileName().toString().endsWith(".log")) { // ignore logs for previous runs that have a number suffix continue; } try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile.toFile()), 
StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { Matcher matcher = pattern.matcher(line); if (matcher.matches()) { matches.add(matchProcessor.apply(matcher)); } } } } } return matches.stream(); } public void copyLogsTo(Path targetDirectory) throws IOException { Files.createDirectories(targetDirectory); TestUtils.copyDirectory(log, targetDirectory); } }
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.components.browser_ui.widget.listmenu; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.Rect; import android.graphics.drawable.ColorDrawable; import android.text.TextUtils; import android.util.AttributeSet; import android.view.View; import org.chromium.base.CollectionUtil; import org.chromium.base.ObserverList; import org.chromium.components.browser_ui.widget.R; import org.chromium.ui.widget.AnchoredPopupWindow; import org.chromium.ui.widget.ChromeImageButton; /** * A menu button meant to be used with modern lists throughout Chrome. Will automatically show and * anchor a popup on press and will rely on a delegate for positioning and content of the popup. * You can define your own content description for accessibility through the * android:contentDescription parameter in the XML layout of the ListMenuButton. The default content * description that corresponds to * context.getString(R.string.accessibility_list_menu_button, "") is used otherwise. */ public class ListMenuButton extends ChromeImageButton implements AnchoredPopupWindow.LayoutObserver { /** * A listener that is notified when the popup menu is shown or dismissed. */ @FunctionalInterface public interface PopupMenuShownListener { void onPopupMenuShown(); default void onPopupMenuDismissed() {} } private final int mMenuMaxWidth; private final boolean mMenuVerticalOverlapAnchor; private final boolean mMenuHorizontalOverlapAnchor; private AnchoredPopupWindow mPopupMenu; private ListMenuButtonDelegate mDelegate; private ObserverList<PopupMenuShownListener> mPopupListeners = new ObserverList<>(); private boolean mTryToFitLargestItem; private boolean mPositionedAtEnd; /** * Creates a new {@link ListMenuButton}. 
* * @param context The {@link Context} used to build the visuals from. * @param attrs The specific {@link AttributeSet} used to build the button. */ public ListMenuButton(Context context, AttributeSet attrs) { super(context, attrs); TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.ListMenuButton); mMenuMaxWidth = a.getDimensionPixelSize(R.styleable.ListMenuButton_menuMaxWidth, getResources().getDimensionPixelSize(R.dimen.list_menu_width)); mMenuHorizontalOverlapAnchor = a.getBoolean(R.styleable.ListMenuButton_menuHorizontalOverlapAnchor, true); mMenuVerticalOverlapAnchor = a.getBoolean(R.styleable.ListMenuButton_menuVerticalOverlapAnchor, true); mPositionedAtEnd = a.getBoolean(R.styleable.ListMenuButton_menuPositionedAtEnd, true); a.recycle(); } /** * Text that represents the item this menu button is related to. This will affect the content * description of the view {@see #setContentDescription(CharSequence)}. * * @param context The string representation of the list item this button represents. */ public void setContentDescriptionContext(String context) { if (TextUtils.isEmpty(context)) { setContentDescription( getContext().getResources().getString(R.string.accessibility_toolbar_btn_menu)); return; } setContentDescription(getContext().getResources().getString( R.string.accessibility_list_menu_button, context)); } /** * Sets the delegate this menu will rely on for populating the popup menu and handling selection * responses. The OnClickListener will be overridden by default to show menu. The menu will not * show or work without the delegate. * * @param delegate The {@link ListMenuButtonDelegate} to use for menu creation and selection * handling. */ public void setDelegate(ListMenuButtonDelegate delegate) { setDelegate(delegate, true); } /** * Sets the delegate this menu will rely on for populating the popup menu and handling selection * responses. The menu will not * show or work without the delegate. 
* * @param delegate The {@link ListMenuButtonDelegate} to use for menu creation and selection * handling. * @param overrideOnClickListener Whether to override the click listener which can trigger * the popup menu. */ public void setDelegate(ListMenuButtonDelegate delegate, boolean overrideOnClickListener) { dismiss(); mDelegate = delegate; if (overrideOnClickListener) { setOnClickListener((view) -> showMenu()); } } /** * Called to dismiss any popup menu that might be showing for this button. */ public void dismiss() { if (mPopupMenu != null) { mPopupMenu.dismiss(); } } /** * Shows a popupWindow built by ListMenuButton */ public void showMenu() { initPopupWindow(); mPopupMenu.show(); notifyPopupListeners(true); } /** * Init the popup window with provided attributes, called before {@link #showMenu()} */ private void initPopupWindow() { if (mDelegate == null) throw new IllegalStateException("Delegate was not set."); ListMenu menu = mDelegate.getListMenu(); menu.addContentViewClickRunnable(this::dismiss); final View contentView = menu.getContentView(); mPopupMenu = new AnchoredPopupWindow(getContext(), this, new ColorDrawable(Color.TRANSPARENT), contentView, mDelegate.getRectProvider(this)); mPopupMenu.setVerticalOverlapAnchor(mMenuVerticalOverlapAnchor); mPopupMenu.setHorizontalOverlapAnchor(mMenuHorizontalOverlapAnchor); mPopupMenu.setMaxWidth(mMenuMaxWidth); if (mTryToFitLargestItem) { // Content width includes the padding around the items, so add it here. final int lateralPadding = contentView.getPaddingLeft() + contentView.getPaddingRight(); mPopupMenu.setDesiredContentWidth(menu.getMaxItemWidth() + lateralPadding); } mPopupMenu.setFocusable(true); mPopupMenu.setLayoutObserver(this); mPopupMenu.addOnDismissListener(() -> { mPopupMenu = null; notifyPopupListeners(false); }); // This should be called explicitly since it is not a default behavior on Android S // in split-screen mode. See crbug.com/1246956. 
mPopupMenu.setOutsideTouchable(true); } /** * Adds a listener which will be notified when the popup menu is shown. * * @param l The listener of interest. */ public void addPopupListener(PopupMenuShownListener l) { mPopupListeners.addObserver(l); } /** * Removes a popup menu listener. * * @param l The listener of interest. */ public void removePopupListener(PopupMenuShownListener l) { mPopupListeners.removeObserver(l); } // AnchoredPopupWindow.LayoutObserver implementation. @Override public void onPreLayoutChange( boolean positionBelow, int x, int y, int width, int height, Rect anchorRect) { if (mPositionedAtEnd) { mPopupMenu.setAnimationStyle( positionBelow ? R.style.EndIconMenuAnim : R.style.EndIconMenuAnimBottom); } else { mPopupMenu.setAnimationStyle( positionBelow ? R.style.StartIconMenuAnim : R.style.StartIconMenuAnimBottom); } } /** * Determines whether to try to fit the largest menu item without overflowing by measuring the * exact width of each item. * * WARNING: do not call when the menu list has more than a handful of items, the performance * will be terrible since it measures every single item. * * @param value Determines whether to try to exactly fit the width of the largest item in the * list. */ public void tryToFitLargestItem(boolean value) { mTryToFitLargestItem = value; } // View implementation. @Override protected void onFinishInflate() { super.onFinishInflate(); if (TextUtils.isEmpty(getContentDescription())) setContentDescriptionContext(""); } @Override protected void onDetachedFromWindow() { dismiss(); super.onDetachedFromWindow(); } /** * Notify all of the PopupMenuShownListeners of a popup menu action. * @param shown Whether the popup menu was shown or dismissed. */ private void notifyPopupListeners(boolean shown) { CollectionUtil.forEach(mPopupListeners.mObservers, l -> { if (shown) { l.onPopupMenuShown(); } else { l.onPopupMenuDismissed(); } }); } }
/* * Copyright 2017 Axway Software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.axway.ats.uiengine.utilities.swing; import java.awt.Component; import javax.swing.JComboBox; import javax.swing.JTree; import javax.swing.text.JTextComponent; import com.axway.ats.common.PublicAtsApi; import com.axway.ats.uiengine.configuration.UiEngineConfigurator; import com.axway.ats.uiengine.elements.UiElement; import com.axway.ats.uiengine.elements.UiElementProperties; import com.axway.ats.uiengine.elements.swing.SwingElementLocator; import com.axway.ats.uiengine.exceptions.ElementNotFoundException; import com.axway.ats.uiengine.exceptions.NotSupportedOperationException; import com.axway.ats.uiengine.exceptions.VerificationException; import com.axway.ats.uiengine.utilities.UiEngineUtilities; @PublicAtsApi public class SwingElementState { private UiElement element; private UiElementProperties elementProperties; private static final int SLEEP_PERIOD = 100; private Exception lastNotFoundException; /** * @param uiElement the element of interest */ public SwingElementState( UiElement uiElement ) { this.element = uiElement; this.elementProperties = uiElement.getElementProperties(); } /** * Verifies the element exist * * @throws VerificationException if the verification fails */ @PublicAtsApi public void verifyExist() { boolean exists = isElementPresent(); if (!exists) { throw new VerificationException(getElementDescription() + " does not exist while it is expected to exist", 
lastNotFoundException); } }

    /**
     * Verifies the element does NOT exist
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyNotExist() {

        boolean exists = isElementPresent();
        if (exists) {
            throw new VerificationException(getElementDescription()
                                            + " exists while it is expected to not exist",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is visible
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyVisible() {

        boolean visible = isElementVisible();
        if (!visible) {
            throw new VerificationException(getElementDescription()
                                            + " is invisible while it is expected to be visible",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is not visible
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyNotVisible() {

        boolean visible = isElementVisible();
        if (visible) {
            throw new VerificationException(getElementDescription()
                                            + " is visible while it is expected to be invisible",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is enabled
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyEnabled() {

        boolean enabled = isElementEnabled();
        if (!enabled) {
            throw new VerificationException(getElementDescription()
                                            + " is disabled while it is expected to be enabled",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is disabled
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyDisabled() {

        boolean enabled = isElementEnabled();
        if (enabled) {
            throw new VerificationException(getElementDescription()
                                            + " is enabled while it is expected to be disabled",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is editable
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyEditable() {

        boolean editable = isElementEditable();
        if (!editable) {
            throw new VerificationException(getElementDescription()
                                            + " is not editable while it is expected to be editable",
                                            lastNotFoundException);
        }
    }

    /**
     * Verifies the element is not editable
     *
     * @throws VerificationException if the verification fails
     */
    @PublicAtsApi
    public void verifyNotEditable() {

        boolean editable = isElementEditable();
        if (editable) {
            throw new VerificationException(getElementDescription()
                                            + " is editable while it is expected to be not editable",
                                            lastNotFoundException);
        }
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become visible
     *
     * @throws VerificationException if the element does not become visible
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeVisible() {

        int millis = UiEngineConfigurator.getInstance().getElementStateChangeDelay();
        long endTime = System.currentTimeMillis() + millis;
        // poll until the element is visible or the deadline passes
        do {
            if (isElementVisible()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        // NOTE(review): the element description is appended after the "ms" suffix,
        // producing e.g. "... within 500 ms 'button'"
        throw new VerificationException("Failed to verify that element is visible within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become invisible
     *
     * @throws VerificationException if the element does not become invisible
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeNotVisible() {

        int millis = UiEngineConfigurator.getInstance().getElementStateChangeDelay();
        long endTime = System.currentTimeMillis() + millis;
        // poll until the element is no longer visible or the deadline passes
        do {
            if (!isElementVisible()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException("Failed to verify that element is invisible within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become enabled
     *
     * @throws VerificationException if the element does not become enabled
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeEnabled() {

        waitToBecomeEnabled(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become disabled
     *
     * @throws VerificationException if the element does not become disabled
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeDisabled() {

        waitToBecomeDisabled(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become existing
     *
     * @throws VerificationException if the element does not become existing
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeExisting() {

        waitToBecomeExisting(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time the element to become existing
     *
     * @param millis milliseconds to wait
     * @throws VerificationException if the element does not become existing for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeExisting( int millis ) {

        long endTime = System.currentTimeMillis() + millis;
        do {
            if (isElementPresent()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException("Failed to verify that element exists within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become non-existing
     *
     * @throws VerificationException if the element does not become non-existing
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeNotExisting() {

        waitToBecomeNotExisting(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time the element to become non-existing
     *
     * @param millis milliseconds to wait
     * @throws VerificationException if the element does not become non-existing for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeNotExisting( int millis ) {

        long endTime = System.currentTimeMillis() + millis;
        do {
            if (!isElementPresent()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException("Failed to verify the element is not existing within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    /**
     * Waits for a period of time the element to become enabled
     *
     * @param millis milliseconds to wait
     * @throws VerificationException if the element does not become enabled for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeEnabled( int millis ) {

        long endTime = System.currentTimeMillis() + millis;
        do {
            if (isElementEnabled()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException("Failed to verify that element is enabled within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    /**
     * Waits for a period of time the element to become disabled
     *
     * @param millis milliseconds to wait
     * @throws VerificationException if the element does not become disabled for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeDisabled( int millis ) {

        long endTime = System.currentTimeMillis() + millis;
        do {
            if (!isElementEnabled()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException("Failed to verify that element is disabled within " + millis
                                        + " ms" + getElementDescription(), lastNotFoundException);
    }

    // Builds a quoted, human-readable identification of the element for error messages.
    private String getElementDescription() {

        return " '" + (element != null
                                       ? element.toString()
                                       : "Element " + elementProperties.toString()) + "'";
    }

    /**
     * Check if the element presents or not
     *
     * @return if the element presents or not
     */
    @PublicAtsApi
    public boolean isElementPresent() {

        try {
            SwingElementLocator.findFixture(element);
            return true;
        } catch (ElementNotFoundException nsee) {
            // remembered so failed verify*/wait* calls can report the underlying cause
            lastNotFoundException = nsee;
            return false;
        }
    }

    /**
     * Check if the element is visible or not
     *
     * @return if the element is visible or not
     */
    @PublicAtsApi
    public boolean isElementVisible() {

        try {
            return SwingElementLocator.findFixture(element).component().isVisible();
        } catch (ElementNotFoundException nsee) {
            lastNotFoundException = nsee;
            return false;
        }
    }

    /**
     * Check if the element is enabled or disabled
     *
     * @return if the element is enabled or disabled
     */
    @PublicAtsApi
    public boolean isElementEnabled() {

        try {
            return SwingElementLocator.findFixture(element).component().isEnabled();
        } catch (ElementNotFoundException nsee) {
            lastNotFoundException = nsee;
            return false;
        }
    }

    /**
     * Check if the element is editable or not
     *
     * @return if the element is editable or not
     */
    @PublicAtsApi
    public boolean isElementEditable() {

        try {
            Component component = SwingElementLocator.findFixture(element).component();
            // only text components, combo boxes and trees have an 'editable' state
            if (component instanceof JTextComponent) {
                return ((JTextComponent) component).isEditable();
            } else if (component instanceof JComboBox) {
                return ((JComboBox) component).isEditable();
            } else if (component instanceof JTree) {
                return ((JTree) component).isEditable();
            }
            throw new NotSupportedOperationException("Component of type \""
                                                     + component.getClass().getName()
                                                     + "\" doesn't have 'editable' state!");
        } catch (ElementNotFoundException nsee) {
            lastNotFoundException = nsee;
            return false;
        }
    }
}
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gopivotal.manager; import org.apache.catalina.Contained; import org.apache.catalina.Container; import org.apache.catalina.Session; import org.apache.catalina.Store; import org.apache.catalina.Valve; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import javax.servlet.ServletException; import java.io.IOException; /** * An implementation for the {@link Valve} interface that flushes any existing sessions before the response is returned. 
*/
public final class SessionFlushValve extends AbstractLifecycle implements Contained, SessionFlushValveManagement, Valve {

    // abstraction over MBean registration, injectable for testing
    private final JmxSupport jmxSupport;

    // guards container/next/store accessors with read/write locks
    private final LockTemplate lockTemplate = new LockTemplate();

    private volatile Container container;

    private volatile Valve next;

    private volatile Store store;

    /**
     * Creates a new instance
     */
    public SessionFlushValve() {
        this(new StandardJmxSupport());
    }

    SessionFlushValve(JmxSupport jmxSupport) {
        this.jmxSupport = jmxSupport;
    }

    @Override
    public void backgroundProcess() {
        // intentionally a no-op: this valve has no periodic work
    }

    @Override
    public Container getContainer() {
        return this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Container>() {

            @Override
            public Container invoke() {
                return SessionFlushValve.this.container;
            }

        });
    }

    @Override
    public void setContainer(final Container container) {
        this.lockTemplate.withWriteLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() {
                SessionFlushValve.this.container = container;
                return null;
            }

        });
    }

    @Override
    public Valve getNext() {
        return this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Valve>() {

            @Override
            public Valve invoke() {
                return SessionFlushValve.this.next;
            }

        });
    }

    @Override
    public void setNext(final Valve valve) {
        this.lockTemplate.withWriteLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() {
                SessionFlushValve.this.next = valve;
                return null;
            }

        });
    }

    /**
     * Returns the store used when flushing the session
     *
     * @return the store used when flushing the session
     */
    public Store getStore() {
        return this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Store>() {

            @Override
            public Store invoke() {
                return SessionFlushValve.this.store;
            }

        });
    }

    /**
     * Sets the store to use when flushing the session
     *
     * @param store the store to use when flushing the session
     */
    public void setStore(final Store store) {
        this.lockTemplate.withWriteLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() {
                SessionFlushValve.this.store = store;
                return null;
            }

        });
    }

    // Delegates to the next valve and, in a finally block, saves the request's
    // session (if present and still valid) to the configured store — so the
    // session is flushed even when the downstream pipeline throws.
    @Override
    public void invoke(final Request request, final Response response) {
        this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() throws IOException, ServletException {
                try {
                    SessionFlushValve.this.next.invoke(request, response);
                } finally {
                    Session session = request.getSessionInternal(false);

                    if (session != null && session.isValid()) {
                        SessionFlushValve.this.store.save(session);
                    }
                }

                return null;
            }

        });
    }

    @Override
    public boolean isAsyncSupported() {
        return false;
    }

    // Registers this valve as an MBean when the lifecycle starts.
    @Override
    protected void startInternal() {
        this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() {
                SessionFlushValve.this.jmxSupport.register(getObjectName(), SessionFlushValve.this);
                return null;
            }

        });
    }

    // Unregisters the MBean when the lifecycle stops.
    @Override
    protected void stopInternal() {
        this.lockTemplate.withReadLock(new LockTemplate.LockedOperation<Void>() {

            @Override
            public Void invoke() {
                SessionFlushValve.this.jmxSupport.unregister(getObjectName());
                return null;
            }

        });
    }

    // NOTE(review): getContext()/getObjectName() read this.container directly rather
    // than through lockTemplate; the field is volatile, so reads observe the latest
    // value written by setContainer().
    private String getContext() {
        String name = this.container.getName();
        return name.startsWith("/") ? name : String.format("/%s", name);
    }

    private String getObjectName() {
        String context = getContext();
        String host = this.container.getParent().getName();

        return String.format("Catalina:type=Valve,context=%s,host=%s,name=%s", context, host,
                getClass().getSimpleName());
    }

}
package org.apache.lucene.codecs.lucene40; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.Closeable; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.TermVectorsReader; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.IOUtils; /** * Lucene 4.0 Term Vectors reader. 
 * @deprecated only for reading 4.0 and 4.1 segments */
@Deprecated
final class Lucene40TermVectorsReader extends TermVectorsReader implements Closeable {

  // per-term bit flags stored in the .tvf file
  static final byte STORE_POSITIONS_WITH_TERMVECTOR = 0x1;
  static final byte STORE_OFFSET_WITH_TERMVECTOR = 0x2;
  static final byte STORE_PAYLOAD_WITH_TERMVECTOR = 0x4;

  /** Extension of vectors fields file */
  static final String VECTORS_FIELDS_EXTENSION = "tvf";

  /** Extension of vectors documents file */
  static final String VECTORS_DOCUMENTS_EXTENSION = "tvd";

  /** Extension of vectors index file */
  static final String VECTORS_INDEX_EXTENSION = "tvx";

  static final String CODEC_NAME_FIELDS = "Lucene40TermVectorsFields";
  static final String CODEC_NAME_DOCS = "Lucene40TermVectorsDocs";
  static final String CODEC_NAME_INDEX = "Lucene40TermVectorsIndex";

  static final int VERSION_NO_PAYLOADS = 0;
  static final int VERSION_PAYLOADS = 1;
  static final int VERSION_START = VERSION_NO_PAYLOADS;
  static final int VERSION_CURRENT = VERSION_PAYLOADS;

  static final long HEADER_LENGTH_FIELDS = CodecUtil.headerLength(CODEC_NAME_FIELDS);
  static final long HEADER_LENGTH_DOCS = CodecUtil.headerLength(CODEC_NAME_DOCS);
  static final long HEADER_LENGTH_INDEX = CodecUtil.headerLength(CODEC_NAME_INDEX);

  private FieldInfos fieldInfos;

  // the three open files: index (tvx), documents (tvd), fields (tvf);
  // all null when the segment has no term vectors
  private IndexInput tvx;
  private IndexInput tvd;
  private IndexInput tvf;
  private int size;
  private int numTotalDocs;

  /** Used by clone. */
  Lucene40TermVectorsReader(FieldInfos fieldInfos, IndexInput tvx, IndexInput tvd, IndexInput tvf, int size, int numTotalDocs) {
    this.fieldInfos = fieldInfos;
    this.tvx = tvx;
    this.tvd = tvd;
    this.tvf = tvf;
    this.size = size;
    this.numTotalDocs = numTotalDocs;
  }

  /** Sole constructor. Opens and header-checks the tvx/tvd/tvf files for a segment;
   *  closes everything it opened if any step fails. */
  public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldInfos, IOContext context)
    throws IOException {
    final String segment = si.name;
    final int size = si.getDocCount();

    boolean success = false;

    try {
      String idxName = IndexFileNames.segmentFileName(segment, "", VECTORS_INDEX_EXTENSION);
      tvx = d.openInput(idxName, context);
      final int tvxVersion = CodecUtil.checkHeader(tvx, CODEC_NAME_INDEX, VERSION_START, VERSION_CURRENT);

      String fn = IndexFileNames.segmentFileName(segment, "", VECTORS_DOCUMENTS_EXTENSION);
      tvd = d.openInput(fn, context);
      final int tvdVersion = CodecUtil.checkHeader(tvd, CODEC_NAME_DOCS, VERSION_START, VERSION_CURRENT);
      fn = IndexFileNames.segmentFileName(segment, "", VECTORS_FIELDS_EXTENSION);
      tvf = d.openInput(fn, context);
      final int tvfVersion = CodecUtil.checkHeader(tvf, CODEC_NAME_FIELDS, VERSION_START, VERSION_CURRENT);
      assert HEADER_LENGTH_INDEX == tvx.getFilePointer();
      assert HEADER_LENGTH_DOCS == tvd.getFilePointer();
      assert HEADER_LENGTH_FIELDS == tvf.getFilePointer();
      // all three files must carry the same format version
      if (tvxVersion != tvdVersion) {
        throw new CorruptIndexException("version mismatch: tvx=" + tvxVersion + " != tvd=" + tvdVersion, tvd);
      }
      if (tvxVersion != tvfVersion) {
        throw new CorruptIndexException("version mismatch: tvx=" + tvxVersion + " != tvf=" + tvfVersion, tvf);
      }
      // each tvx entry is 16 bytes (two longs: tvd pointer + tvf pointer), hence >> 4
      numTotalDocs = (int) (tvx.length()-HEADER_LENGTH_INDEX >> 4);
      this.size = numTotalDocs;
      assert size == 0 || numTotalDocs == size;

      this.fieldInfos = fieldInfos;
      success = true;
    } finally {
      // With lock-less commits, it's entirely possible (and
      // fine) to hit a FileNotFound exception above. In
      // this case, we want to explicitly close any subset
      // of things that were opened so that we don't have to
      // wait for a GC to do so.
      if (!success) {
        try {
          close();
        } catch (Throwable t) {} // ensure we throw our original exception
      }
    }
  }

  // Not private to avoid synthetic access$NNN methods
  void seekTvx(final int docNum) throws IOException {
    // 16 bytes per document entry, after the codec header
    tvx.seek(docNum * 16L + HEADER_LENGTH_INDEX);
  }

  @Override
  public void close() throws IOException {
    IOUtils.close(tvx, tvd, tvf);
  }

  /**
   *
   * @return The number of documents in the reader
   */
  int size() {
    return size;
  }

  // Per-document view over the fields that have term vectors.
  private class TVFields extends Fields {
    private final int[] fieldNumbers;
    private final long[] fieldFPs;
    private final Map<Integer,Integer> fieldNumberToIndex = new HashMap<>();

    public TVFields(int docID) throws IOException {
      seekTvx(docID);
      tvd.seek(tvx.readLong());

      final int fieldCount = tvd.readVInt();
      assert fieldCount >= 0;
      if (fieldCount != 0) {
        fieldNumbers = new int[fieldCount];
        fieldFPs = new long[fieldCount];
        for(int fieldUpto=0;fieldUpto<fieldCount;fieldUpto++) {
          final int fieldNumber = tvd.readVInt();
          fieldNumbers[fieldUpto] = fieldNumber;
          fieldNumberToIndex.put(fieldNumber, fieldUpto);
        }

        // first tvf pointer comes from tvx; the rest are delta-encoded in tvd
        long position = tvx.readLong();
        fieldFPs[0] = position;
        for(int fieldUpto=1;fieldUpto<fieldCount;fieldUpto++) {
          position += tvd.readVLong();
          fieldFPs[fieldUpto] = position;
        }
      } else {
        // TODO: we can improve writer here, eg write 0 into
        // tvx file, so we know on first read from tvx that
        // this doc has no TVs
        fieldNumbers = null;
        fieldFPs = null;
      }
    }

    @Override
    public Iterator<String> iterator() {
      return new Iterator<String>() {
        private int fieldUpto;

        @Override
        public String next() {
          if (fieldNumbers != null && fieldUpto < fieldNumbers.length) {
            return fieldInfos.fieldInfo(fieldNumbers[fieldUpto++]).name;
          } else {
            throw new NoSuchElementException();
          }
        }

        @Override
        public boolean hasNext() {
          return fieldNumbers != null && fieldUpto < fieldNumbers.length;
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }

    @Override
    public Terms terms(String field) throws IOException {
      final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
      if (fieldInfo == null) {
        // No such field
        return null;
      }

      final Integer fieldIndex = fieldNumberToIndex.get(fieldInfo.number);
      if (fieldIndex == null) {
        // Term vectors were not indexed for this field
        return null;
      }

      return new TVTerms(fieldFPs[fieldIndex]);
    }

    @Override
    public int size() {
      if (fieldNumbers == null) {
        return 0;
      } else {
        return fieldNumbers.length;
      }
    }
  }

  // Terms for one field of one document, backed by a position in the tvf file.
  private class TVTerms extends Terms {
    private final int numTerms;
    private final long tvfFPStart;
    private final boolean storePositions;
    private final boolean storeOffsets;
    private final boolean storePayloads;

    public TVTerms(long tvfFP) throws IOException {
      tvf.seek(tvfFP);
      numTerms = tvf.readVInt();
      final byte bits = tvf.readByte();
      storePositions = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
      storeOffsets = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
      storePayloads = (bits & STORE_PAYLOAD_WITH_TERMVECTOR) != 0;
      tvfFPStart = tvf.getFilePointer();
    }

    @Override
    public TermsEnum iterator(TermsEnum reuse) throws IOException {
      TVTermsEnum termsEnum;
      if (reuse instanceof TVTermsEnum) {
        termsEnum = (TVTermsEnum) reuse;
        if (!termsEnum.canReuse(tvf)) {
          termsEnum = new TVTermsEnum();
        }
      } else {
        termsEnum = new TVTermsEnum();
      }
      termsEnum.reset(numTerms, tvfFPStart, storePositions, storeOffsets, storePayloads);
      return termsEnum;
    }

    @Override
    public long size() {
      return numTerms;
    }

    @Override
    public long getSumTotalTermFreq() {
      return -1;
    }

    @Override
    public long getSumDocFreq() {
      // Every term occurs in just one doc:
      return numTerms;
    }

    @Override
    public int getDocCount() {
      return 1;
    }

    @Override
    public boolean hasFreqs() {
      return true;
    }

    @Override
    public boolean hasOffsets() {
      return storeOffsets;
    }

    @Override
    public boolean hasPositions() {
      return storePositions;
    }

    @Override
    public boolean hasPayloads() {
      return storePayloads;
    }
  }

  private class TVTermsEnum extends TermsEnum {
    private final IndexInput origTVF;
    private final IndexInput tvf;
    private int numTerms;
    private int nextTerm;
    private int freq;
    private BytesRefBuilder lastTerm = new BytesRefBuilder();
    private BytesRefBuilder term = new BytesRefBuilder();
    private boolean storePositions;
    private boolean storeOffsets;
    private boolean storePayloads;
    private long tvfFP;

    private int[] positions;
    private int[] startOffsets;
    private int[] endOffsets;

    // one shared byte[] for any term's payloads
    private int[] payloadOffsets;
    private int lastPayloadLength;
    private byte[] payloadData;

    // NOTE: tvf is pre-positioned by caller
    public TVTermsEnum() {
      this.origTVF = Lucene40TermVectorsReader.this.tvf;
      tvf = origTVF.clone();
    }

    public boolean canReuse(IndexInput tvf) {
      return tvf == origTVF;
    }

    public void reset(int numTerms, long tvfFPStart, boolean storePositions, boolean storeOffsets, boolean storePayloads) throws IOException {
      this.numTerms = numTerms;
      this.storePositions = storePositions;
      this.storeOffsets = storeOffsets;
      this.storePayloads = storePayloads;
      nextTerm = 0;
      tvf.seek(tvfFPStart);
      tvfFP = tvfFPStart;
      positions = null;
      startOffsets = null;
      endOffsets = null;
      payloadOffsets = null;
      payloadData = null;
      lastPayloadLength = -1;
    }

    // NOTE: slow!  (linear scan)
    @Override
    public SeekStatus seekCeil(BytesRef text)
      throws IOException {
      if (nextTerm != 0) {
        final int cmp = text.compareTo(term.get());
        if (cmp < 0) {
          // target sorts before current term: rewind and rescan from the start
          nextTerm = 0;
          tvf.seek(tvfFP);
        } else if (cmp == 0) {
          return SeekStatus.FOUND;
        }
      }

      while (next() != null) {
        final int cmp = text.compareTo(term.get());
        if (cmp < 0) {
          return SeekStatus.NOT_FOUND;
        } else if (cmp == 0) {
          return SeekStatus.FOUND;
        }
      }

      return SeekStatus.END;
    }

    @Override
    public void seekExact(long ord) {
      throw new UnsupportedOperationException();
    }

    @Override
    public BytesRef next() throws IOException {
      if (nextTerm >= numTerms) {
        return null;
      }
      // terms are prefix-compressed against the previous term
      term.copyBytes(lastTerm.get());
      final int start = tvf.readVInt();
      final int deltaLen = tvf.readVInt();
      term.setLength(start + deltaLen);
      term.grow(term.length());
      tvf.readBytes(term.bytes(), start, deltaLen);
      freq = tvf.readVInt();

      if (storePayloads) {
        positions = new int[freq];
        payloadOffsets = new int[freq];
        int totalPayloadLength = 0;
        int pos = 0;
        for(int posUpto=0;posUpto<freq;posUpto++) {
          // low bit of the delta-code flags a payload-length change
          int code = tvf.readVInt();
          pos += code >>> 1;
          positions[posUpto] = pos;
          if ((code & 1) != 0) {
            // length change
            lastPayloadLength = tvf.readVInt();
          }
          payloadOffsets[posUpto] = totalPayloadLength;
          totalPayloadLength += lastPayloadLength;
          assert totalPayloadLength >= 0;
        }
        payloadData = new byte[totalPayloadLength];
        tvf.readBytes(payloadData, 0, payloadData.length);
      } else if (storePositions /* no payloads */) {
        // TODO: we could maybe reuse last array, if we can
        // somehow be careful about consumer never using two
        // D&PEnums at once...
        positions = new int[freq];
        int pos = 0;
        for(int posUpto=0;posUpto<freq;posUpto++) {
          pos += tvf.readVInt();
          positions[posUpto] = pos;
        }
      }

      if (storeOffsets) {
        startOffsets = new int[freq];
        endOffsets = new int[freq];
        int offset = 0;
        for(int posUpto=0;posUpto<freq;posUpto++) {
          startOffsets[posUpto] = offset + tvf.readVInt();
          offset = endOffsets[posUpto] = startOffsets[posUpto] + tvf.readVInt();
        }
      }

      lastTerm.copyBytes(term.get());
      nextTerm++;
      return term.get();
    }

    @Override
    public BytesRef term() {
      return term.get();
    }

    @Override
    public long ord() {
      throw new UnsupportedOperationException();
    }

    @Override
    public int docFreq() {
      return 1;
    }

    @Override
    public long totalTermFreq() {
      return freq;
    }

    @Override
    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags /* ignored */) throws IOException {
      TVDocsEnum docsEnum;
      if (reuse != null && reuse instanceof TVDocsEnum) {
        docsEnum = (TVDocsEnum) reuse;
      } else {
        docsEnum = new TVDocsEnum();
      }
      docsEnum.reset(liveDocs, freq);
      return docsEnum;
    }

    @Override
    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {

      if (!storePositions && !storeOffsets) {
        return null;
      }

      TVDocsAndPositionsEnum docsAndPositionsEnum;
      if (reuse != null && reuse instanceof TVDocsAndPositionsEnum) {
        docsAndPositionsEnum = (TVDocsAndPositionsEnum) reuse;
      } else {
        docsAndPositionsEnum = new TVDocsAndPositionsEnum();
      }
      docsAndPositionsEnum.reset(liveDocs, positions, startOffsets, endOffsets, payloadOffsets, payloadData);
      return docsAndPositionsEnum;
    }
  }

  // NOTE: sort of a silly class, since you can get the
  // freq() already by TermsEnum.totalTermFreq
  private static class TVDocsEnum extends DocsEnum {
    private boolean didNext;
    private int doc = -1;
    private int freq;
    private Bits liveDocs;

    @Override
    public int freq() throws IOException {
      return freq;
    }

    @Override
    public int docID() {
      return doc;
    }

    @Override
    public int nextDoc() {
      // a term vector covers exactly one document (doc 0)
      if (!didNext && (liveDocs == null || liveDocs.get(0))) {
        didNext = true;
        return (doc = 0);
      } else {
        return (doc = NO_MORE_DOCS);
      }
    }

    @Override
    public int advance(int target) throws IOException {
      return slowAdvance(target);
    }

    public void reset(Bits liveDocs, int freq) {
      this.liveDocs = liveDocs;
      this.freq = freq;
      this.doc = -1;
      didNext = false;
    }

    @Override
    public long cost() {
      return 1;
    }
  }

  private static class TVDocsAndPositionsEnum extends DocsAndPositionsEnum {
    private boolean didNext;
    private int doc = -1;
    private int nextPos;
    private Bits liveDocs;
    private int[] positions;
    private int[] startOffsets;
    private int[] endOffsets;
    private int[] payloadOffsets;
    private BytesRef payload = new BytesRef();
    private byte[] payloadBytes;

    @Override
    public int freq() throws IOException {
      if (positions != null) {
        return positions.length;
      } else {
        assert startOffsets != null;
        return startOffsets.length;
      }
    }

    @Override
    public int docID() {
      return doc;
    }

    @Override
    public int nextDoc() {
      if (!didNext && (liveDocs == null || liveDocs.get(0))) {
        didNext = true;
        return (doc = 0);
      } else {
        return (doc = NO_MORE_DOCS);
      }
    }

    @Override
    public int advance(int target) throws IOException {
      return slowAdvance(target);
    }

    public void reset(Bits liveDocs, int[] positions, int[] startOffsets, int[] endOffsets, int[] payloadLengths, byte[] payloadBytes) {
      this.liveDocs = liveDocs;
      this.positions = positions;
      this.startOffsets = startOffsets;
      this.endOffsets = endOffsets;
      this.payloadOffsets = payloadLengths;
      this.payloadBytes = payloadBytes;
      this.doc = -1;
      didNext = false;
      nextPos = 0;
    }

    @Override
    public BytesRef getPayload() {
      if (payloadOffsets == null) {
        return null;
      } else {
        // payload for the last-returned position: [off, end) into the shared byte[]
        int off = payloadOffsets[nextPos-1];
        int end = nextPos == payloadOffsets.length ? payloadBytes.length : payloadOffsets[nextPos];
        if (end - off == 0) {
          return null;
        }
        payload.bytes = payloadBytes;
        payload.offset = off;
        payload.length = end - off;
        return payload;
      }
    }

    @Override
    public int nextPosition() {
      assert (positions != null && nextPos < positions.length) ||
        startOffsets != null && nextPos < startOffsets.length;
      if (positions != null) {
        return positions[nextPos++];
      } else {
        // offsets-only vectors have no positions; still advance the cursor
        nextPos++;
        return -1;
      }
    }

    @Override
    public int startOffset() {
      if (startOffsets == null) {
        return -1;
      } else {
        return startOffsets[nextPos-1];
      }
    }

    @Override
    public int endOffset() {
      if (endOffsets == null) {
        return -1;
      } else {
        return endOffsets[nextPos-1];
      }
    }

    @Override
    public long cost() {
      return 1;
    }
  }

  @Override
  public Fields get(int docID) throws IOException {
    if (tvx != null) {
      Fields fields = new TVFields(docID);
      if (fields.size() == 0) {
        // TODO: we can improve writer here, eg write 0 into
        // tvx file, so we know on first read from tvx that
        // this doc has no TVs
        return null;
      } else {
        return fields;
      }
    } else {
      return null;
    }
  }

  @Override
  public TermVectorsReader clone() {
    IndexInput cloneTvx = null;
    IndexInput cloneTvd = null;
    IndexInput cloneTvf = null;

    // These are null when a TermVectorsReader was created
    // on a segment that did not have term vectors saved
    if (tvx != null && tvd != null && tvf != null) {
      cloneTvx = tvx.clone();
      cloneTvd = tvd.clone();
      cloneTvf = tvf.clone();
    }

    return new Lucene40TermVectorsReader(fieldInfos, cloneTvx, cloneTvd, cloneTvf, size, numTotalDocs);
  }

  @Override
  public long ramBytesUsed() {
    return 0;
  }

  @Override
  public Collection<Accountable> getChildResources() {
    return Collections.emptyList();
  }

  @Override
  public void checkIntegrity() throws IOException {}

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/*
 * Copyright 2012 Corpuslinguistic working group Humboldt University Berlin.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package annis.gui.flatquerybuilder;

import annis.libgui.Helper;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.data.Property.ValueChangeListener;
import com.vaadin.event.FieldEvents;
import com.vaadin.ui.AbstractSelect;
import com.vaadin.ui.Button;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.ComboBox;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Panel;
import com.vaadin.ui.themes.ChameleonTheme;
import java.text.Normalizer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentSkipListSet;
import org.apache.commons.lang3.StringUtils;

/**
 * A query-builder search box for one span annotation level: a combo box with
 * fuzzy (Levenshtein-based) suggestion filtering, a "Regex" toggle and a close
 * button.
 *
 * @author Tom
 * @author Martin
 */
public class SpanBox extends Panel implements Button.ClickListener, FieldEvents.TextChangeListener
{
  private Button btClose;
  private String ebene;
  private HorizontalLayout sb;
  private SensitiveComboBox cb;
  private CheckBox reBox; // by Martin, tick for regular expression
  private Collection<String> annonames; // added by Martin, necessary for rebuilding the list of cb-Items
  private FlatQueryBuilder sq;

  public static final String BUTTON_CLOSE_LABEL = "Close";
  private static final String SB_CB_WIDTH = "140px";
  // only suggest candidates whose Levenshtein distance is below this bound
  private static final int MAX_SUGGESTION_DISTANCE = 5;

  public SpanBox(final String ebene, final FlatQueryBuilder sq)
  {
    this(ebene, sq, false);
  }

  /**
   * @param ebene   the annotation level this box queries
   * @param sq      owning query builder (source of annotation names/values)
   * @param isRegex whether the box starts in regular-expression mode
   */
  public SpanBox(final String ebene, final FlatQueryBuilder sq, boolean isRegex)
  {
    this.ebene = ebene;
    this.sq = sq;
    sb = new HorizontalLayout();
    sb.setImmediate(true);
    sb.setSpacing(true);
    sb.setMargin(true);
    ConcurrentSkipListSet<String> annos = new ConcurrentSkipListSet<>();
    for (String a : sq.getAvailableAnnotationLevels(ebene))
    {
      annos.add(a);
    }
    this.annonames = annos;
    Label tf = new Label(ebene);
    sb.addComponent(tf);
    this.cb = new SensitiveComboBox();
    cb.setWidth(SB_CB_WIDTH);
    cb.addStyleName(Helper.CORPUS_FONT_FORCE);
    // configure & load content
    cb.setImmediate(true);
    cb.setNewItemsAllowed(true);
    cb.setTextInputAllowed(true);
    for (String annoname : this.annonames)
    {
      cb.addItem(annoname);
    }
    // filtering is done manually in textChange(), so switch the built-in one off
    cb.setFilteringMode(AbstractSelect.Filtering.FILTERINGMODE_OFF);
    cb.addListener((FieldEvents.TextChangeListener) this);
    sb.addComponent(cb);
    HorizontalLayout sbtoolbar = new HorizontalLayout();
    sbtoolbar.setSpacing(true);
    // searchbox tickbox for regex
    CheckBox tb = new CheckBox("Regex");
    tb.setImmediate(true);
    tb.setValue(isRegex);
    sbtoolbar.addComponent(tb);
    tb.addValueChangeListener(new ValueChangeListener()
    {
      @Override
      public void valueChange(ValueChangeEvent event)
      {
        applyRegexMode(reBox.getValue());
      }
    });
    reBox = tb;
    // close the searchbox
    btClose = new Button(BUTTON_CLOSE_LABEL, (Button.ClickListener) this);
    btClose.setStyleName(ChameleonTheme.BUTTON_SMALL);
    sbtoolbar.addComponent(btClose);
    // make visible
    sb.addComponent(sbtoolbar);
    setContent(sb);
  }

  /**
   * Switches the combo box between literal and regex entry mode.
   * In literal mode the known annotation values are reloaded; in regex mode the
   * current value is escaped and kept as the selection.
   */
  private void applyRegexMode(boolean r)
  {
    if (!r)
    {
      SpanBox.buildBoxValues(cb, ebene, sq);
    }
    else if (cb.getValue() != null)
    {
      String escapedItem = sq.escapeRegexCharacters(cb.getValue().toString());
      cb.addItem(escapedItem);
      cb.setValue(escapedItem);
    }
  }

  @Override
  public void buttonClick(Button.ClickEvent event)
  {
    // close functionality
    if (event.getButton() == btClose)
    {
      sb.removeComponent(cb);
      cb.setValue("");
      sq.removeSpanBox(this);
    }
    // regex box functionality
    // NOTE(review): previously used the deprecated reBox.booleanValue();
    // getValue() matches the accessor used everywhere else in this class
    else if (event.getComponent() == reBox)
    {
      boolean r = reBox.getValue();
      cb.setNewItemsAllowed(r);
      applyRegexMode(r);
    }
  }

  /**
   * Rebuilds the suggestion list: ranks all annotation names by Levenshtein
   * distance (accent-insensitive) to the typed text and shows the closest ones.
   */
  @Override
  public void textChange(FieldEvents.TextChangeEvent event)
  {
    String txt = event.getText();
    // typed generics instead of the former raw HashMap<Integer, Collection>
    HashMap<Integer, Collection<String>> levdistvals = new HashMap<>();
    if (txt.length() > 1)
    {
      cb.removeAllItems();
      for (String s : annonames)
      {
        Integer d = StringUtils.getLevenshteinDistance(removeAccents(txt), removeAccents(s));
        // single lookup replaces the former containsKey / !containsKey pair
        Collection<String> bucket = levdistvals.get(d);
        if (bucket == null)
        {
          bucket = new TreeSet<>();
          levdistvals.put(d, bucket);
        }
        bucket.add(s);
      }
      SortedSet<Integer> keys = new TreeSet<>(levdistvals.keySet());
      for (Integer k : keys.subSet(0, MAX_SUGGESTION_DISTANCE))
      {
        List<String> values = new ArrayList<>(levdistvals.get(k));
        Collections.sort(values, String.CASE_INSENSITIVE_ORDER);
        for (String v : values)
        {
          cb.addItem(v);
        }
      }
    }
  }

  /** @return the annotation level this box queries */
  public String getAttribute()
  {
    return ebene;
  }

  /** @return the current combo-box value (NPE if nothing is selected) */
  public String getValue()
  {
    return cb.getValue().toString();
  }

  /** Sets the combo-box value; in regex mode the value is added as an item first. */
  public void setValue(String value)
  {
    if (reBox.getValue())
    {
      cb.addItem(value);
    }
    cb.setValue(value);
  }

  /** @return whether the box is in regular-expression mode */
  public boolean isRegEx()
  {
    return reBox.getValue();
  }

  /**
   * Strips diacritical marks via NFD normalization ("café" -&gt; "cafe").
   *
   * @param text text to normalize, may be {@code null}
   * @return the accent-free text, or {@code null} if {@code text} was {@code null}
   */
  public static String removeAccents(String text)
  {
    return text == null ? null
      : Normalizer.normalize(text, Normalizer.Form.NFD)
        .replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
  }

  /**
   * Reloads the combo box with the known annotation values for a level,
   * keeping the current selection if it is among them.
   */
  public static void buildBoxValues(ComboBox cb, String level, FlatQueryBuilder sq)
  {
    String value = (cb.getValue() != null) ? cb.getValue().toString() : "";
    Collection<String> annovals = sq.getAnnotationValues(level);
    cb.removeAllItems();
    for (String s : annovals)
    {
      cb.addItem(s);
    }
    if (annovals.contains(value))
    {
      cb.setValue(value);
    }
    else
    {
      cb.setValue(null);
    }
  }
}
/*** Author :Vibhav Gogate The University of Texas at Dallas *****/

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import javax.imageio.ImageIO;

/**
 * Compresses an image by clustering its packed ARGB pixels with k-means and
 * replacing every pixel by its cluster's center, leaving at most k distinct
 * colors in the output.
 */
public class KMeans {

    /** Random retries for distinct seed centers before the deterministic scan fallback. */
    private static final int MAX_SEED_ATTEMPTS = 1000;

    public static void main(String[] args) {
        if (args.length < 3) {
            System.out.println("Usage: Kmeans <input-image> <k> <output-image>");
            return;
        }
        try {
            // Read original image from file, run through kmeans, and output results.
            BufferedImage originalImage = ImageIO.read(new File(args[0]));
            BufferedImage kmeansJpg = kmeans_helper(originalImage, Integer.parseInt(args[1]));
            ImageIO.write(kmeansJpg, "png", new File(args[2]));
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Copies the image, clusters its pixels and writes the quantized colors back.
     *
     * @param originalImage An image to compress
     * @param k The number of clusters to use to compress originalImage
     * @return A compressed copy of the image produced by the k-means algorithm
     */
    private static BufferedImage kmeans_helper(BufferedImage originalImage, int k) {
        int w = originalImage.getWidth();
        int h = originalImage.getHeight();
        BufferedImage kmeansImage = new BufferedImage(w, h, originalImage.getType());
        Graphics2D g = kmeansImage.createGraphics();
        try {
            g.drawImage(originalImage, 0, 0, w, h, null);
        } finally {
            g.dispose(); // fix: release the native graphics context (was leaked)
        }

        // Flatten the pixels column-by-column into a 1-D ARGB array.
        int[] imageRGB = new int[w * h];
        int counter = 0;
        for (int i = 0; i < w; i++) {
            for (int j = 0; j < h; j++) {
                imageRGB[counter++] = kmeansImage.getRGB(i, j);
            }
        }

        // Call kmeans algorithm: update the rgb values to compress the image.
        kmeans(imageRGB, k);

        // Write the new rgb values back in the same traversal order.
        counter = 0;
        for (int i = 0; i < w; i++) {
            for (int j = 0; j < h; j++) {
                kmeansImage.setRGB(i, j, imageRGB[counter++]);
            }
        }
        return kmeansImage;
    }

    /**
     * Performs the k-means algorithm with the specified k on the pixels,
     * overwriting each pixel in place with its final cluster center.
     * (Widened from private to public so the compression step can be reused
     * on raw pixel arrays; backward compatible.)
     *
     * @param pixels Tightly packed ARGB values for an image; modified in place
     * @param k The number of clusters to use for compression; must be >= 1
     *          and at most pixels.length
     */
    public static void kmeans(int[] pixels, int k) {
        if (k <= 0) {
            // fix: the original indexed zero-length arrays and crashed for k <= 0
            return;
        }
        if (pixels.length < k) {
            System.out.println("You must supply at least k pixels.");
            return;
        }

        int[] previousCenters = new int[k];            // previous iteration's centers
        int[] currentCenters = new int[k];             // current iteration's centers
        int[] pixelsInCluster = new int[k];            // membership count per cluster
        int[] clusterTotalAlpha = new int[k];          // per-cluster channel sums ...
        int[] clusterTotalRed = new int[k];
        int[] clusterTotalGreen = new int[k];
        int[] clusterTotalBlue = new int[k];
        int[] clusterAssignment = new int[pixels.length]; // cluster index per pixel

        // Seed initial centers with k random pixels, avoiding duplicates among
        // the centers chosen SO FAR. (Bug fix: the original tested candidates
        // against the still-zeroed previousCenters array, which permitted
        // duplicate centers and looped forever on images whose pixels are 0.)
        Random rng = new Random();
        for (int i = 0; i < k; i++) {
            int candidate = pixels[rng.nextInt(pixels.length)];
            int attempts = 0;
            while (exists(candidate, currentCenters, i) && attempts++ < MAX_SEED_ATTEMPTS) {
                candidate = pixels[rng.nextInt(pixels.length)];
            }
            if (exists(candidate, currentCenters, i)) {
                // Deterministic fallback: scan for any color not yet used as a
                // center. If the image has fewer than k distinct colors, accept
                // a duplicate center instead of spinning forever.
                for (int p : pixels) {
                    if (!exists(p, currentCenters, i)) {
                        candidate = p;
                        break;
                    }
                }
            }
            currentCenters[i] = candidate;
        }

        do {
            // Snapshot centers for the convergence test and reset accumulators.
            for (int i = 0; i < k; i++) {
                previousCenters[i] = currentCenters[i];
                pixelsInCluster[i] = 0;
                clusterTotalAlpha[i] = 0;
                clusterTotalRed[i] = 0;
                clusterTotalGreen[i] = 0;
                clusterTotalBlue[i] = 0;
            }

            // Assignment step: attach every pixel to its nearest center and
            // accumulate per-cluster channel totals.
            for (int i = 0; i < pixels.length; i++) {
                double bestDist = Double.MAX_VALUE; // was misleadingly named maxDist
                int closestCenter = 0;
                for (int j = 0; j < k; j++) {
                    double d = pixelDist(pixels[i], currentCenters[j]);
                    if (d < bestDist) {
                        bestDist = d;
                        closestCenter = j;
                    }
                }
                clusterAssignment[i] = closestCenter;
                pixelsInCluster[closestCenter]++;
                clusterTotalAlpha[closestCenter] += (pixels[i] >>> 24) & 0xFF;
                clusterTotalRed[closestCenter] += (pixels[i] >>> 16) & 0xFF;
                clusterTotalGreen[closestCenter] += (pixels[i] >>> 8) & 0xFF;
                clusterTotalBlue[closestCenter] += pixels[i] & 0xFF;
            }

            // Update step: move each center to the mean of its members (integer
            // division truncates exactly like the original's (int)(double/double)
            // for these non-negative sums). Bug fix: an empty cluster keeps its
            // previous center — the original divided by zero, collapsing empty
            // clusters to color 0x00000000.
            for (int i = 0; i < k; i++) {
                if (pixelsInCluster[i] == 0) {
                    continue;
                }
                int averageAlpha = clusterTotalAlpha[i] / pixelsInCluster[i];
                int averageRed = clusterTotalRed[i] / pixelsInCluster[i];
                int averageGreen = clusterTotalGreen[i] / pixelsInCluster[i];
                int averageBlue = clusterTotalBlue[i] / pixelsInCluster[i];
                currentCenters[i] = ((averageAlpha & 0xFF) << 24)
                    | ((averageRed & 0xFF) << 16)
                    | ((averageGreen & 0xFF) << 8)
                    | (averageBlue & 0xFF);
            }
        } while (!converged(previousCenters, currentCenters));

        // k-means has converged: assign each pixel the value of its center.
        for (int i = 0; i < pixels.length; i++) {
            pixels[i] = currentCenters[clusterAssignment[i]];
        }
    }

    /**
     * Checks whether value occurs in the first {@code count} entries of array.
     *
     * @param value - The value to search the array prefix for
     * @param array - The array to search
     * @param count - Number of leading entries that are valid
     * @return true if value exists among the first count entries
     */
    private static boolean exists(int value, int[] array, int count) {
        for (int i = 0; i < count; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether the two center arrays are element-wise identical.
     *
     * @param a - The first array
     * @param b - The second array
     * @return true if the arrays have converged
     */
    private static boolean converged(int[] a, int[] b) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    /**
     * Treats pixA and pixB as 4-D points (A, R, G, B packed high-to-low byte)
     * and returns the Euclidean distance between them.
     *
     * @param pixA - The first pixel
     * @param pixB - The second pixel
     * @return The Euclidean distance between pixA and pixB
     */
    private static double pixelDist(int pixA, int pixB) {
        int aDiff = ((pixA >>> 24) & 0xFF) - ((pixB >>> 24) & 0xFF);
        int rDiff = ((pixA >>> 16) & 0xFF) - ((pixB >>> 16) & 0xFF);
        int gDiff = ((pixA >>> 8) & 0xFF) - ((pixB >>> 8) & 0xFF);
        int bDiff = (pixA & 0xFF) - (pixB & 0xFF);
        return Math.sqrt(aDiff * aDiff + rDiff * rDiff + gDiff * gDiff + bDiff * bDiff);
    }
}
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.impl.internal.store.disk; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.Status; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.service.StatisticsService; import org.ehcache.core.statistics.OperationStatistic; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.CachePersistenceException; import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; import org.ehcache.impl.internal.store.disk.factories.EhcachePersistentSegmentFactory; import org.ehcache.impl.internal.store.offheap.AbstractOffHeapStore; import org.ehcache.impl.internal.store.offheap.EhcacheOffHeapBackingMap; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; import org.ehcache.spi.persistence.StateRepository; import 
org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.OptionalServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.collections.ConcurrentWeakIdentityHashMap; import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.disk.paging.MappedPageSource; import org.terracotta.offheapstore.disk.persistent.Persistent; import org.terracotta.offheapstore.disk.persistent.PersistentPortability; import org.terracotta.offheapstore.disk.storage.FileBackedStorageEngine; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.lang.reflect.Proxy; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static java.lang.Math.max; import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.spi.service.ServiceUtils.findSingletonAmongst; import static java.util.Arrays.asList; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; /** * Implementation of {@link 
Store} supporting disk-resident persistence. */
public class OffHeapDiskStore<K, V> extends AbstractOffHeapStore<K, V> implements AuthoritativeTier<K, V> {

  private static final Logger LOGGER = LoggerFactory.getLogger(OffHeapDiskStore.class);

  // Keys written into the metadata file so a later recovery can verify that the
  // persisted key/value types match the configured ones.
  private static final String KEY_TYPE_PROPERTY_NAME = "keyType";
  private static final String VALUE_TYPE_PROPERTY_NAME = "valueType";

  // Lifecycle marker; flipped UNINITIALIZED -> AVAILABLE exactly once in the constructor.
  protected final AtomicReference<Status> status = new AtomicReference<>(Status.UNINITIALIZED);

  private final SwitchableEvictionAdvisor<K, OffHeapValueHolder<V>> evictionAdvisor;
  private final Class<K> keyType;
  private final Class<V> valueType;
  private final ClassLoader classLoader;
  private final Serializer<K> keySerializer;
  private final Serializer<V> valueSerializer;
  // Configured capacity of the disk resource, in bytes.
  private final long sizeInBytes;
  private final FileBasedPersistenceContext fileBasedPersistenceContext;
  private final ExecutionService executionService;
  private final String threadPoolAlias;
  private final int writerConcurrency;
  private final int diskSegments;

  // Backing map; created by Provider.init(...) after construction and nulled by
  // Provider.close(...). Volatile because lifecycle and store operations may
  // run on different threads.
  private volatile EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> map;

  /**
   * Creates the store shell; the disk-backed map itself is created later by
   * {@link Provider#init(OffHeapDiskStore)}.
   */
  public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext,
                          ExecutionService executionService, String threadPoolAlias, int writerConcurrency, int diskSegments,
                          final Configuration<K, V> config,
                          TimeSource timeSource,
                          StoreEventDispatcher<K, V> eventDispatcher,
                          long sizeInBytes, StatisticsService statisticsService) {
    super(config, timeSource, eventDispatcher, statisticsService);
    this.fileBasedPersistenceContext = fileBasedPersistenceContext;
    this.executionService = executionService;
    this.threadPoolAlias = threadPoolAlias;
    this.writerConcurrency = writerConcurrency;
    this.diskSegments = diskSegments;
    EvictionAdvisor<? super K, ? super V> evictionAdvisor = config.getEvictionAdvisor();
    if (evictionAdvisor != null) {
      this.evictionAdvisor = wrap(evictionAdvisor);
    } else {
      this.evictionAdvisor = wrap(noAdvice());
    }
    this.keyType = config.getKeyType();
    this.valueType = config.getValueType();
    this.classLoader = config.getClassLoader();
    this.keySerializer = config.getKeySerializer();
    this.valueSerializer = config.getValueSerializer();
    this.sizeInBytes = sizeInBytes;
    if (!status.compareAndSet(Status.UNINITIALIZED, Status.AVAILABLE)) {
      throw new AssertionError();
    }
  }

  @Override
  protected String getStatisticsTag() {
    return "Disk";
  }

  @Override
  public List<CacheConfigurationChangeListener> getConfigurationChangeListeners() {
    // This store exposes no runtime-configuration listeners.
    return Collections.emptyList();
  }

  /**
   * Returns the backing map: recovers the previously persisted one when all
   * three files (data/index/metadata) exist, otherwise creates a fresh one.
   * IOExceptions are wrapped unchecked because callers cannot recover here.
   */
  private EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> getBackingMap(long size, Serializer<K> keySerializer, Serializer<V> valueSerializer, SwitchableEvictionAdvisor<K, OffHeapValueHolder<V>> evictionAdvisor) {
    File dataFile = getDataFile();
    File indexFile = getIndexFile();
    File metadataFile = getMetadataFile();
    if (dataFile.isFile() && indexFile.isFile() && metadataFile.isFile()) {
      try {
        return recoverBackingMap(size, keySerializer, valueSerializer, evictionAdvisor);
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
    } else {
      try {
        return createBackingMap(size, keySerializer, valueSerializer, evictionAdvisor);
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
    }
  }

  /**
   * Rebuilds the map from the persisted metadata/index/data files:
   * throws when the persisted key/value types differ from the configured ones;
   * starts over with an empty map when the index is clearly stale (unclean
   * shutdown) or fails to replay (treated as corruption).
   */
  private EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> recoverBackingMap(long size, Serializer<K> keySerializer, Serializer<V> valueSerializer, SwitchableEvictionAdvisor<K, OffHeapValueHolder<V>> evictionAdvisor) throws IOException {
    File dataFile = getDataFile();
    File indexFile = getIndexFile();
    File metadataFile = getMetadataFile();

    Properties properties = new Properties();
    try (FileInputStream fis = new FileInputStream(metadataFile)) {
      properties.load(fis);
    }
    // Verify the persisted key type matches the configured one before touching data.
    try {
      Class<?> persistedKeyType = Class.forName(properties.getProperty(KEY_TYPE_PROPERTY_NAME), false, classLoader);
      if (!keyType.equals(persistedKeyType)) {
        throw new IllegalArgumentException("Persisted key type '" + persistedKeyType.getName() + "' is not the same as the configured key type '" + keyType.getName() + "'");
      }
    } catch (ClassNotFoundException cnfe) {
      throw new IllegalStateException("Persisted key type class not found", cnfe);
    }
    try {
      Class<?> persistedValueType = Class.forName(properties.getProperty(VALUE_TYPE_PROPERTY_NAME), false, classLoader);
      if (!valueType.equals(persistedValueType)) {
        throw new IllegalArgumentException("Persisted value type '" + persistedValueType.getName() + "' is not the same as the configured value type '" + valueType.getName() + "'");
      }
    } catch (ClassNotFoundException cnfe) {
      throw new IllegalStateException("Persisted value type class not found", cnfe);
    }

    try (FileInputStream fin = new FileInputStream(indexFile)) {
      ObjectInputStream input = new ObjectInputStream(fin);
      // The index starts with the timestamp written by Provider.close(); compare
      // it with the data file's mtime to detect an unclean shutdown.
      long dataTimestampFromIndex = input.readLong();
      long dataTimestampFromFile = dataFile.lastModified();
      long delta = dataTimestampFromFile - dataTimestampFromIndex;
      if (delta < 0) {
        LOGGER.info("The index for data file {} is more recent than the data file itself by {}ms : this is harmless.", dataFile.getName(), -delta);
      } else if (delta > TimeUnit.SECONDS.toMillis(1)) {
        // More than a second of drift: do not trust the index; rebuild empty.
        LOGGER.warn("The index for data file {} is out of date by {}ms, probably due to an unclean shutdown. Creating a new empty store.", dataFile.getName(), delta);
        return createBackingMap(size, keySerializer, valueSerializer, evictionAdvisor);
      } else if (delta > 0) {
        LOGGER.info("The index for data file {} is out of date by {}ms, assuming this small delta is a result of the OS/filesystem.", dataFile.getName(), delta);
      }

      MappedPageSource source = new MappedPageSource(dataFile, false, size);
      try {
        PersistentPortability<K> keyPortability = persistent(new SerializerPortability<>(keySerializer));
        PersistentPortability<OffHeapValueHolder<V>> valuePortability = persistent(createValuePortability(valueSerializer));
        DiskWriteThreadPool writeWorkers = new DiskWriteThreadPool(executionService, threadPoolAlias, writerConcurrency);

        Factory<FileBackedStorageEngine<K, OffHeapValueHolder<V>>> storageEngineFactory = FileBackedStorageEngine.createFactory(source, max((size / diskSegments) / 10, 1024), BYTES, keyPortability, valuePortability, writeWorkers, false);

        EhcachePersistentSegmentFactory<K, OffHeapValueHolder<V>> factory = new EhcachePersistentSegmentFactory<>(
            source,
            storageEngineFactory,
            64,
            evictionAdvisor,
            mapEvictionListener, false);
        EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> m = new EhcachePersistentConcurrentOffHeapClockCache<>(input, evictionAdvisor, factory);

        // Replay the persisted segment state from the remainder of the index stream.
        m.bootstrap(input);
        return m;
      } catch (IOException e) {
        // Close the mapped source before propagating so the file is not leaked.
        source.close();
        throw e;
      }
    } catch (Exception e) {
      // Any replay failure is treated as index corruption: log and rebuild
      // from scratch rather than failing cache initialization.
      LOGGER.info("Index file was corrupt. Deleting data file {}. {}", dataFile.getAbsolutePath(), e.getMessage());
      LOGGER.debug("Exception during recovery", e);
      return createBackingMap(size, keySerializer, valueSerializer, evictionAdvisor);
    }
  }

  /**
   * Creates a brand-new backing map, first writing the metadata file that
   * records the configured key/value types for later recovery checks.
   */
  private EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> createBackingMap(long size, Serializer<K> keySerializer, Serializer<V> valueSerializer, SwitchableEvictionAdvisor<K, OffHeapValueHolder<V>> evictionAdvisor) throws IOException {
    File metadataFile = getMetadataFile();
    try (FileOutputStream fos = new FileOutputStream(metadataFile)) {
      Properties properties = new Properties();
      properties.put(KEY_TYPE_PROPERTY_NAME, keyType.getName());
      properties.put(VALUE_TYPE_PROPERTY_NAME, valueType.getName());
      properties.store(fos, "Key and value types");
    }

    MappedPageSource source = new MappedPageSource(getDataFile(), size);
    PersistentPortability<K> keyPortability = persistent(new SerializerPortability<>(keySerializer));
    PersistentPortability<OffHeapValueHolder<V>> valuePortability = persistent(createValuePortability(valueSerializer));
    DiskWriteThreadPool writeWorkers = new DiskWriteThreadPool(executionService, threadPoolAlias, writerConcurrency);

    Factory<FileBackedStorageEngine<K, OffHeapValueHolder<V>>> storageEngineFactory = FileBackedStorageEngine.createFactory(source, max((size / diskSegments) / 10, 1024), BYTES, keyPortability, valuePortability, writeWorkers, true);

    EhcachePersistentSegmentFactory<K, OffHeapValueHolder<V>> factory = new EhcachePersistentSegmentFactory<>(
        source,
        storageEngineFactory,
        64,
        evictionAdvisor,
        mapEvictionListener, true);
    return new EhcachePersistentConcurrentOffHeapClockCache<>(evictionAdvisor, factory, diskSegments);
  }

  @Override
  protected EhcacheOffHeapBackingMap<K, OffHeapValueHolder<V>> backingMap() {
    return map;
  }

  @Override
  protected SwitchableEvictionAdvisor<K, OffHeapValueHolder<V>> evictionAdvisor() {
    return evictionAdvisor;
  }

  // Well-known file names inside this store's persistence directory.
  private File getDataFile() {
    return new File(fileBasedPersistenceContext.getDirectory(), "ehcache-disk-store.data");
  }

  private File getIndexFile() {
    return new File(fileBasedPersistenceContext.getDirectory(), "ehcache-disk-store.index");
  }

  private File getMetadataFile() {
    return new File(fileBasedPersistenceContext.getDirectory(), "ehcache-disk-store.meta");
  }

  /**
   * Factory service for {@link OffHeapDiskStore} instances; also owns their
   * lifecycle (init/close), the persisted index handshake and statistics wiring.
   */
  @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class})
  public static class Provider extends BaseStoreProvider implements AuthoritativeTier.Provider {

    // Weak-keyed so released/GC'd stores do not pin their statistics or spaces.
    private final Map<OffHeapDiskStore<?, ?>, OperationStatistic<?>[]> tierOperationStatistics = new ConcurrentWeakIdentityHashMap<>();
    private final Map<Store<?, ?>, PersistenceSpaceIdentifier<?>> createdStores = new ConcurrentWeakIdentityHashMap<>();
    private final String defaultThreadPool;
    private volatile DiskResourceService diskPersistenceService;

    public Provider() {
      this(null);
    }

    public Provider(String threadPoolAlias) {
      this.defaultThreadPool = threadPoolAlias;
    }

    @Override
    protected ResourceType<SizedResourcePool> getResourceType() {
      return ResourceType.Core.DISK;
    }

    @Override
    public int rank(final Set<ResourceType<?>> resourceTypes, final Collection<ServiceConfiguration<?, ?>> serviceConfigs) {
      // Only claims configurations that are exactly {DISK}.
      return resourceTypes.equals(Collections.singleton(getResourceType())) ? 1 : 0;
    }

    @Override
    public int rankAuthority(ResourceType<?> authorityResource, Collection<ServiceConfiguration<?, ?>> serviceConfigs) {
      return authorityResource.equals(getResourceType()) ? 1 : 0;
    }

    @Override
    public <K, V> OffHeapDiskStore<K, V> createStore(Configuration<K, V> storeConfig, ServiceConfiguration<?, ?>... serviceConfigs) {
      OffHeapDiskStore<K, V> store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig.getDispatcherConcurrency()), serviceConfigs);
      tierOperationStatistics.put(store, new OperationStatistic<?>[] {
        createTranslatedStatistic(store, "get", TierOperationOutcomes.GET_TRANSLATION, "get"),
        createTranslatedStatistic(store, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") });
      return store;
    }

    /**
     * Builds the store from the service-resolved collaborators: time source,
     * execution service, optional {@link OffHeapDiskStoreConfiguration} tuning
     * and the persistence space supplied via service configurations.
     */
    private <K, V> OffHeapDiskStore<K, V> createStoreInternal(Configuration<K, V> storeConfig, StoreEventDispatcher<K, V> eventDispatcher, ServiceConfiguration<?, ?>... serviceConfigs) {
      if (getServiceProvider() == null) {
        throw new NullPointerException("ServiceProvider is null in OffHeapDiskStore.Provider.");
      }
      TimeSource timeSource = getServiceProvider().getService(TimeSourceService.class).getTimeSource();
      ExecutionService executionService = getServiceProvider().getService(ExecutionService.class);

      SizedResourcePool diskPool = storeConfig.getResourcePools().getPoolForResource(getResourceType());
      if (!(diskPool.getUnit() instanceof MemoryUnit)) {
        throw new IllegalArgumentException("OffHeapDiskStore only supports resources configuration expressed in \"memory\" unit");
      }
      MemoryUnit unit = (MemoryUnit)diskPool.getUnit();

      // Optional tuning; fall back to defaults when no configuration is supplied.
      String threadPoolAlias;
      int writerConcurrency;
      int diskSegments;
      OffHeapDiskStoreConfiguration config = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, (Object[]) serviceConfigs);
      if (config == null) {
        threadPoolAlias = defaultThreadPool;
        writerConcurrency = OffHeapDiskStoreConfiguration.DEFAULT_WRITER_CONCURRENCY;
        diskSegments = OffHeapDiskStoreConfiguration.DEFAULT_DISK_SEGMENTS;
      } else {
        threadPoolAlias = config.getThreadPoolAlias();
        writerConcurrency = config.getWriterConcurrency();
        diskSegments = config.getDiskSegments();
      }
      PersistenceSpaceIdentifier<?> space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs);
      if (space == null) {
        throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?");
      }
      try {
        FileBasedPersistenceContext persistenceContext = diskPersistenceService.createPersistenceContextWithin(space , "offheap-disk-store");
        OffHeapDiskStore<K, V> offHeapStore = new OffHeapDiskStore<>(persistenceContext, executionService, threadPoolAlias, writerConcurrency, diskSegments,
          storeConfig, timeSource, eventDispatcher, unit.toBytes(diskPool.getSize()), getServiceProvider().getService(StatisticsService.class));
        createdStores.put(offHeapStore, space);
        return offHeapStore;
      } catch (CachePersistenceException cpex) {
        throw new RuntimeException("Unable to create persistence context in " + space, cpex);
      }
    }

    @Override
    public void releaseStore(Store<?, ?> resource) {
      if (createdStores.remove(resource) == null) {
        throw new IllegalArgumentException("Given store is not managed by this provider : " + resource);
      }
      try {
        OffHeapDiskStore<?, ?> offHeapDiskStore = (OffHeapDiskStore<?, ?>)resource;
        close(offHeapDiskStore);
        getStatisticsService().ifPresent(s -> s.cleanForNode(offHeapDiskStore));
        tierOperationStatistics.remove(offHeapDiskStore);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    /**
     * Flushes the store's map and persists its index — prefixed with the
     * current timestamp that recoverBackingMap() later compares against the
     * data file's mtime — then closes the map. No-op when already closed.
     */
    static <K, V> void close(final OffHeapDiskStore<K, V> resource) throws IOException {
      EhcachePersistentConcurrentOffHeapClockCache<K, OffHeapValueHolder<V>> localMap = resource.map;
      if (localMap != null) {
        resource.map = null;
        localMap.flush();

        try (ObjectOutputStream output = new ObjectOutputStream(new FileOutputStream(resource.getIndexFile()))) {
          output.writeLong(System.currentTimeMillis());
          localMap.persist(output);
        }
        localMap.close();
      }
    }

    @Override
    public void initStore(Store<?, ?> resource) {
      PersistenceSpaceIdentifier<?> identifier = createdStores.get(resource);
      if (identifier == null) {
        throw new IllegalArgumentException("Given store is not managed by this provider : " + resource);
      }
      OffHeapDiskStore<?, ?> diskStore = (OffHeapDiskStore) resource;

      // Stateful serializers must get their state repositories before the
      // backing map is created in init(diskStore) below.
      Serializer<?> keySerializer = diskStore.keySerializer;
      if (keySerializer instanceof StatefulSerializer) {
        StateRepository stateRepository;
        try {
          stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "key-serializer");
        } catch (CachePersistenceException e) {
          throw new RuntimeException(e);
        }
        ((StatefulSerializer)keySerializer).init(stateRepository);
      }
      Serializer<?> valueSerializer = diskStore.valueSerializer;
      if (valueSerializer instanceof StatefulSerializer) {
        StateRepository stateRepository;
        try {
          stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "value-serializer");
        } catch (CachePersistenceException e) {
          throw new RuntimeException(e);
        }
        ((StatefulSerializer)valueSerializer).init(stateRepository);
      }

      init(diskStore);
    }

    /** Creates (or recovers) the store's backing map. */
    static <K, V> void init(final OffHeapDiskStore<K, V> resource) {
      resource.map = resource.getBackingMap(resource.sizeInBytes, resource.keySerializer, resource.valueSerializer, resource.evictionAdvisor);
    }

    @Override
    public void start(ServiceProvider<Service> serviceProvider) {
      super.start(serviceProvider);
      diskPersistenceService = serviceProvider.getService(DiskResourceService.class);
      if (diskPersistenceService == null) {
        throw new IllegalStateException("Unable to find file based persistence service");
      }
    }

    @Override
    public void stop() {
      try {
        createdStores.clear();
        diskPersistenceService = null;
      } finally {
        super.stop();
      }
    }

    @Override
    public <K, V> AuthoritativeTier<K, V> createAuthoritativeTier(Configuration<K, V> storeConfig, ServiceConfiguration<?, ?>... serviceConfigs) {
      OffHeapDiskStore<K, V> authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher<>(storeConfig
        .getDispatcherConcurrency()), serviceConfigs);
      tierOperationStatistics.put(authoritativeTier, new OperationStatistic<?>[] {
        createTranslatedStatistic(authoritativeTier, "get", TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "getAndFault"),
        createTranslatedStatistic(authoritativeTier, "eviction", TierOperationOutcomes.EVICTION_TRANSLATION, "eviction") });
      return authoritativeTier;
    }

    @Override
    public void releaseAuthoritativeTier(AuthoritativeTier<?, ?> resource) {
      releaseStore(resource);
    }

    @Override
    public void initAuthoritativeTier(AuthoritativeTier<?, ?> resource) {
      initStore(resource);
    }
  }

  /*
   * This is kind of a hack, but it's safe to use this if the regular portability
   * is stateless.
   */
  @SuppressWarnings("unchecked")
  public static <T> PersistentPortability<T> persistent(final Portability<T> normal) {
    if (normal instanceof PersistentPortability<?>) {
      return (PersistentPortability<T>) normal;
    } else {
      // Graft the PersistentPortability interface onto 'normal' via a dynamic
      // proxy: Persistent methods become no-ops, everything else delegates.
      LinkedHashSet<Class<?>> proxyInterfaces = new LinkedHashSet<>();
      for (Class<?> klazz = normal.getClass(); klazz != null; klazz = klazz.getSuperclass()) {
        proxyInterfaces.addAll(asList(klazz.getInterfaces()));
      }
      proxyInterfaces.add(PersistentPortability.class);

      return (PersistentPortability<T>) Proxy.newProxyInstance(normal.getClass().getClassLoader(), proxyInterfaces.toArray(new Class<?>[0]), (o, method, os) -> {
        if (method.getDeclaringClass().equals(Persistent.class)) {
          return null;
        } else {
          return method.invoke(normal, os);
        }
      });
    }
  }

  // Package-private accessors for the configured tuning values — presumably
  // for tests/diagnostics; verify before widening or removing.
  String getThreadPoolAlias() {
    return threadPoolAlias;
  }

  int getWriterConcurrency() {
    return writerConcurrency;
  }

  int getDiskSegments() {
    return diskSegments;
  }
}
/* * MIT License * * Copyright (c) 2019 WANG Lingsong * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package org.jsfr.json;

import com.google.common.io.Resources;
import org.hamcrest.CustomMatcher;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.jsfr.json.compiler.JsonPathCompiler;
import org.jsfr.json.provider.JavaCollectionProvider;
import org.jsfr.json.provider.JsonProvider;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

/**
 * Shared test suite for {@link JsonSurfer} implementations.
 *
 * <p>This class is abstract: concrete subclasses are expected to initialize the
 * {@link #surfer} and {@link #provider} fields (presumably one subclass per
 * underlying JSON parser/provider pair — NOTE(review): confirm against the
 * concrete subclasses, they are not visible in this file). The tests exercise
 * JsonPath binding, filters, deep scan, array slicing, collectors, resumable
 * parsing, stoppable parsing, and error-handling strategies against the JSON
 * fixture resources on the test classpath ({@code sample.json},
 * {@code sample_filter.json}, {@code sample_filter2.json}, {@code array.json},
 * {@code sample2.json}).
 */
public abstract class JsonSurferTest {

    protected final static Logger LOGGER = LoggerFactory.getLogger(JsonSurferTest.class);

    // Initialized by concrete subclasses; every test runs against these.
    protected JsonSurfer surfer;
    protected JsonProvider provider;

    // Listener that only logs the received value; used by the testExample* tests.
    private JsonPathListener print = new JsonPathListener() {
        @Override
        public void onValue(Object value, ParsingContext context) {
            LOGGER.debug("Received value: {}", value);
        }
    };

    /** Opens a classpath test resource as a stream. */
    protected InputStream read(String resourceName) throws IOException {
        return Resources.getResource(resourceName).openStream();
    }

    /** Reads a classpath test resource fully, using the surfer's parser charset. */
    protected String readAsString(String resourceName) throws IOException {
        return Resources.toString(Resources.getResource(resourceName), surfer.getParserCharset());
    }

    // ParsingContext.cast() should convert matched values to Book, Long, Double and String.
    @Test
    public void testTypeCasting() throws Exception {
        surfer.configBuilder()
                .bind("$.store.book[*]", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertNotNull(context.cast(value, Book.class));
                    }
                })
                .bind("$.expensive", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(10L, context.cast(value, Long.class).longValue());
                    }
                })
                .bind("$.store.book[0].price", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(8.95d, context.cast(value, Double.class), 0);
                    }
                })
                .bind("$.store.book[1].title", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals("Sword of Honour", context.cast(value, String.class));
                    }
                }).buildAndSurf(read("sample.json"));
    }

    // "$.*" applied to a top-level array should yield one value per element.
    @Test
    public void testWildcardAtRoot() throws Exception {
        Collection<Object> collection = surfer.collectAll("[\n" +
                "  {\n" +
                "    \"type\"  : \"iPhone\",\n" +
                "    \"number\": \"0123-4567-8888\"\n" +
                "  },\n" +
                "  {\n" +
                "    \"type\"  : \"home\",\n" +
                "    \"number\": \"0123-4567-8910\"\n" +
                "  }\n" +
                "        ]", JsonPathCompiler.compile("$.*"));
        LOGGER.debug("Collect all at root - {}", collection);
        assertEquals(2, collection.size());
    }

    // collectOne with a target class deserializes the match into that type.
    @Test
    public void testTypeBindingOne() throws Exception {
        Book book = surfer.collectOne(read("sample.json"), Book.class, JsonPathCompiler.compile("$..book[1]"));
        assertEquals("Evelyn Waugh", book.getAuthor());
    }

    // Multiple filtered paths: the first value matching any of them is returned.
    @Test
    public void testTypeBindingOneWithFilter() throws Exception {
        Book book = surfer.collectOne(read("sample.json"), Book.class,
                JsonPathCompiler.compile("$..book[?(@.category=='fiction')]"),
                JsonPathCompiler.compile("$..book[?(@.price>9)]"));
        assertEquals("Evelyn Waugh", book.getAuthor());
    }

    @Test
    public void testTypeBindingCollection() throws Exception {
        Collection<Book> book = surfer.collectAll(read("sample.json"), Book.class, JsonPathCompiler.compile("$..book[*]"));
        assertEquals(4, book.size());
        assertEquals("Nigel Rees", book.iterator().next().getAuthor());
    }

    // Streaming iterator over matches; sample.json contains 4 books.
    @Test
    public void testSurfingIterator() throws Exception {
        Iterator<Object> iterator = surfer.iterator(read("sample.json"), JsonPathCompiler.compile("$.store.book[*]"));
        int count = 0;
        while (iterator.hasNext()) {
            LOGGER.debug("Iterator next: {}", iterator.next());
            count++;
        }
        assertEquals(4, count);
    }

    // pause()/resume() protocol: resume() is false before parse() and after completion,
    // true while there are pending pauses.
    @Test
    public void testResumableParser() throws Exception {
        SurfingConfiguration config = surfer.configBuilder()
                .bind("$.store.book[0]", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        LOGGER.debug("First pause");
                        context.pause();
                    }
                })
                .bind("$.store.book[1]", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        LOGGER.debug("Second pause");
                        context.pause();
                    }
                }).build();
        ResumableParser parser = surfer.createResumableParser(read("sample.json"), config);
        assertFalse(parser.resume());
        LOGGER.debug("Start parsing");
        parser.parse();
        LOGGER.debug("Resume from the first pause");
        assertTrue(parser.resume());
        LOGGER.debug("Resume from the second pause");
        assertTrue(parser.resume());
        LOGGER.debug("Parsing stopped");
        assertFalse(parser.resume());
    }

    // save()/load() shares state across listeners within one parse; note book[0] fires
    // before book[1] saves, so it sees null.
    @Test
    public void testTransientMap() throws Exception {
        surfer.configBuilder().bind("$.store.book[1]", new JsonPathListener() {
            @Override
            public void onValue(Object value, ParsingContext context) {
                context.save("foo", "bar");
            }
        }).bind("$.store.book[2]", new JsonPathListener() {
            @Override
            public void onValue(Object value, ParsingContext context) {
                assertEquals("bar", context.load("foo", String.class));
            }
        }).bind("$.store.book[0]", new JsonPathListener() {
            @Override
            public void onValue(Object value, ParsingContext context) {
                assertNull(context.load("foo", String.class));
            }
        }).buildAndSurf(read("sample.json"));
    }

    // Filter: equality against a boolean literal.
    @Test
    public void testJsonPathFilterEqualBoolean() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.marked==true)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: equality against a number literal.
    @Test
    public void testJsonPathFilterEqualNumber() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.price==8.95)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sayings of the Century").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: numeric greater-than matches two of the books.
    @Test
    public void testJsonPathFilterGreaterThan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.price>10)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: numeric less-than; expected objects are rebuilt via the provider
    // and compared for full structural equality.
    @Test
    public void testJsonPathFilterLessThan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.price<10)]", mockListener)
                .buildAndSurf(read("sample.json"));
        Object book1 = provider.createObject();
        provider.put(book1, "category", provider.primitive("reference"));
        provider.put(book1, "author", provider.primitive("Nigel Rees"));
        provider.put(book1, "title", provider.primitive("Sayings of the Century"));
        provider.put(book1, "price", provider.primitive(8.95));
        verify(mockListener).onValue(eq(book1), any(ParsingContext.class));
        Object book2 = provider.createObject();
        provider.put(book2, "category", provider.primitive("fiction"));
        provider.put(book2, "author", provider.primitive("Herman Melville"));
        provider.put(book2, "title", provider.primitive("Moby Dick"));
        provider.put(book2, "isbn", provider.primitive("0-553-21311-3"));
        provider.put(book2, "price", provider.primitive(8.99));
        verify(mockListener).onValue(eq(book2), any(ParsingContext.class));
        verify(mockListener, times(2)).onValue(any(), any(ParsingContext.class));
    }

    // Filter: string equality against a nested child property.
    @Test
    public void testJsonPathFilterEqualString1() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.description.year=='2010')]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("Test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: string equality (single quotes) matches all three fiction books.
    @Test
    public void testJsonPathFilterEqualString() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.category=='fiction')]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Same as above but with double-quoted string literal in the filter.
    @Test
    public void testJsonPathFilterEqualStringWithDoubleQuote() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.category==\"fiction\")]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: existence check on a scalar property.
    @Test
    public void testJsonPathFilterExistence() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.isbn)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: existence check on an object-valued property.
    @Test
    public void testJsonPathFilterExistence2() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.description)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: negation of an existence check.
    @Test
    public void testJsonPathFilterNegation() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(!(@.isbn))]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sayings of the Century").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sword of Honour").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: combined ||, && and ! in one predicate.
    @Test
    public void testJsonPathFilterAggregate() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.price < 10 || @.category && @.isbn && !(@.price<10))]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Sayings of the Century").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // A child selector may follow a filter.
    @Test
    public void testJsonPathFilterThenChild() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.description.year=='2010')].author", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("Evelyn Waugh")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("Nigel Rees")), any(ParsingContext.class));
    }

    @Test
    public void testJsonPathFilterThenChildWithDoubleQuote() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.description.year==\"2010\")].author", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("Evelyn Waugh")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("Nigel Rees")), any(ParsingContext.class));
    }

    // Deep scan after a filter.
    @Test
    public void testJsonPathFilterThenChildDeepScan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.price==8.95)]..year", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("2010")), any(ParsingContext.class));
        verify(mockListener, times(2)).onValue(eq(provider.primitive("1997")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("1998")), any(ParsingContext.class));
    }

    // Deep scan both before and after the filter; same expectations as above.
    @Test
    public void testJsonPathFilterAfterDeepScanAndThenChildDeepScan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..book[?(@.price==8.95)]..year", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("2010")), any(ParsingContext.class));
        verify(mockListener, times(2)).onValue(eq(provider.primitive("1997")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("1998")), any(ParsingContext.class));
    }

    // Aggregate filter followed by a child selector; one book satisfies both
    // branches, hence times(2) for its title.
    @Test
    public void testJsonPathFilterAggregateThenChild() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.author=='Nigel Rees'||@.description.year=='2010')].title", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(2)).onValue(eq(provider.primitive("Sayings of the Century")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("Sword of Honour")), any(ParsingContext.class));
    }

    // Two filters in one path (outer on books, inner on their volumes).
    @Test
    public void testJsonPathDoubleFilter() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.category=='fiction')].volumes[?(@.year=='1954')]", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Fellowship of the Ring").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Two Towers").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
        verify(mockListener, times(0)).onValue(argThat(new CustomMatcher<Object>("test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Return of the King").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    @Test
    public void testJsonPathDoubleFilterThenChild() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.category=='fiction')].volumes[?(@.year=='1954')].title", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("The Fellowship of the Ring")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("The Two Towers")), any(ParsingContext.class));
        verify(mockListener, times(0)).onValue(eq(provider.primitive("The Return of the King")), any(ParsingContext.class));
    }

    // Non-matching filters must fire no callbacks at all.
    @Test
    public void testJsonPathFilterNotMatch() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..book[?(@.category=='comic')]", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(0)).onValue(any(), any(ParsingContext.class));
    }

    @Test
    public void testJsonPathFilterNotMatch2() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book.title[?(@.title=='comic')]", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(0)).onValue(any(), any(ParsingContext.class));
    }

    // Filters interleaved with deep scans, including an array-index lookup
    // inside the predicate.
    @Test
    public void testJsonPathDoubleFilterThenChildWithDeepscan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..book[?(@.category=='fiction' && @.volumes[2].year=='1955')]..[?(@.year=='1954')]..title", mockListener)
                .buildAndSurf(read("sample_filter2.json"));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("The Fellowship of the Ring")), any(ParsingContext.class));
        verify(mockListener, times(1)).onValue(eq(provider.primitive("The Two Towers")), any(ParsingContext.class));
        verify(mockListener, times(0)).onValue(eq(provider.primitive("The Return of the King")), any(ParsingContext.class));
    }

    // End-to-end check on sample.json: matched values equal provider-built
    // equivalents (object, primitive, array).
    @Test
    public void testSampleJson() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[0].category", mockListener)
                .bind("$.store.book[0]", mockListener)
                .bind("$.store.car", mockListener)
                .bind("$.store.bicycle", mockListener)
                .buildAndSurf(read("sample.json"));
        Object book = provider.createObject();
        provider.put(book, "category", provider.primitive("reference"));
        provider.put(book, "author", provider.primitive("Nigel Rees"));
        provider.put(book, "title", provider.primitive("Sayings of the Century"));
        provider.put(book, "price", provider.primitive(8.95));
        verify(mockListener).onValue(eq(book), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("reference")), any(ParsingContext.class));
        Object cars = provider.createArray();
        provider.add(cars, provider.primitive("ferrari"));
        provider.add(cars, provider.primitive("lamborghini"));
        verify(mockListener).onValue(eq(cars), any(ParsingContext.class));
        Object bicycle = provider.createObject();
        provider.put(bicycle, "color", provider.primitive("red"));
        provider.put(bicycle, "price", provider.primitive(19.95d));
        verify(mockListener).onValue(eq(bicycle), any(ParsingContext.class));
    }

    // "+" path segment (single-level wildcard — NOTE(review): semantics inferred
    // from usage here; confirm against the path compiler).
    @Test
    public void testSample2() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$[0].aiRuleEditorOriginal.+.barrierLevel", mockListener)
                .buildAndSurf(read("sample2.json"));
        verify(mockListener).onValue(eq(provider.primitive("0.8065")), any(ParsingContext.class));
    }

    // stop() inside the first callback must prevent any further callbacks.
    @Test
    public void testStoppableParsing() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        doNothing().when(mockListener)
                .onValue(anyObject(), argThat(new TypeSafeMatcher<ParsingContext>() {
                    @Override
                    public boolean matchesSafely(ParsingContext parsingContext) {
                        parsingContext.stop();
                        return true;
                    }

                    @Override
                    public void describeTo(Description description) {
                    }
                }));
        surfer.configBuilder()
                .bind("$.store.book[0,1,2]", mockListener)
                .bind("$.store.book[3]", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener, times(1))
                .onValue(anyObject(), any(ParsingContext.class));
    }

    // "$.store.*" matches the store's 3 direct children.
    @Test
    public void testChildNodeWildcard() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$.store.*", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener, times(3))
                .onValue(anyObject(), any(ParsingContext.class));
    }

    // "[*]" matches every array element (4 books).
    @Test
    public void testAnyIndex() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$.store.book[*]", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener, times(4))
                .onValue(anyObject(), any(ParsingContext.class));
    }

    // Combined wildcards: every field of every book (18 in total).
    @Test
    public void testWildcardCombination() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[*].*", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener, times(18)).onValue(anyObject(), any(ParsingContext.class));
    }

    // Array slicing forms: open start, explicit range, open end, full slice.
    @Test
    public void testArraySlicing() throws Exception {
        JsonPathListener mock1 = mock(JsonPathListener.class);
        JsonPathListener mock2 = mock(JsonPathListener.class);
        JsonPathListener mock3 = mock(JsonPathListener.class);
        JsonPathListener mock4 = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$[:2]", mock1)
                .bind("$[0:2]", mock2)
                .bind("$[2:]", mock3)
                .bind("$[:]", mock4)
                .buildAndSurf(read("array.json"));
        verify(mock1, times(2)).onValue(anyObject(), any(ParsingContext.class));
        verify(mock2, times(2)).onValue(anyObject(), any(ParsingContext.class));
        verify(mock3, times(3)).onValue(anyObject(), any(ParsingContext.class));
        verify(mock4, times(5)).onValue(anyObject(), any(ParsingContext.class));
    }

    // Each JSON value type at the top-level array is delivered with the
    // provider's representation (string, number, boolean, null, object).
    @Test
    public void testParsingArray() throws Exception {
        JsonPathListener wholeArray = mock(JsonPathListener.class);
        JsonPathListener stringElement = mock(JsonPathListener.class);
        JsonPathListener numberElement = mock(JsonPathListener.class);
        JsonPathListener booleanElement = mock(JsonPathListener.class);
        JsonPathListener nullElement = mock(JsonPathListener.class);
        JsonPathListener objectElement = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$", wholeArray)
                .bind("$[0]", stringElement)
                .bind("$[1]", numberElement)
                .bind("$[2]", booleanElement)
                .bind("$[3]", nullElement)
                .bind("$[4]", objectElement)
                .buildAndSurf(read("array.json"));
        Object object = provider.createObject();
        provider.put(object, "key", provider.primitive("value"));
        Object array = provider.createArray();
        provider.add(array, provider.primitive("abc"));
        provider.add(array, provider.primitive(8.88));
        provider.add(array, provider.primitive(true));
        provider.add(array, provider.primitiveNull());
        provider.add(array, object);
        verify(wholeArray).onValue(eq(array), any(ParsingContext.class));
        verify(stringElement).onValue(eq(provider.primitive("abc")), any(ParsingContext.class));
        verify(numberElement).onValue(eq(provider.primitive(8.88)), any(ParsingContext.class));
        verify(booleanElement).onValue(eq(provider.primitive(true)), any(ParsingContext.class));
        verify(nullElement).onValue(eq(provider.primitiveNull()), any(ParsingContext.class));
        verify(objectElement).onValue(eq(object), any(ParsingContext.class));
    }

    // Deep scan ("..") at various depths.
    @Test
    public void testDeepScan() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..author", mockListener)
                .bind("$..store..bicycle..color", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener).onValue(eq(provider.primitive("Nigel Rees")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("Evelyn Waugh")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("Herman Melville")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("J. R. R. Tolkien")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("red")), any(ParsingContext.class));
    }

    @Test
    public void testDeepScan2() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..store..price", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener).onValue(eq(provider.primitive(8.95)), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive(12.99)), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive(8.99)), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive(22.99)), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive(19.95)), any(ParsingContext.class));
    }

    // Deep scan ending in a wildcard.
    @Test
    public void testAny() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store..bicycle..*", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener).onValue(eq(provider.primitive("red")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive(19.95)), any(ParsingContext.class));
    }

    // Smoke test: "$..*" must traverse the whole document without error.
    @Test
    public void testFindEverything() throws Exception {
        surfer.configBuilder()
                .bind("$..*", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        LOGGER.trace("value: {}", value);
                    }
                })
                .buildAndSurf(read("sample.json"));
    }

    // Index union combined with a children union.
    @Test
    public void testIndexesAndChildrenOperator() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$..book[1,3]['author','title']", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener).onValue(eq(provider.primitive("Evelyn Waugh")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("Sword of Honour")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("J. R. R. Tolkien")), any(ParsingContext.class));
        verify(mockListener).onValue(eq(provider.primitive("The Lord of the Rings")), any(ParsingContext.class));
    }

    // collectAll/collectOne overloads taking a raw (uncompiled) path string.
    @Test
    public void testCollectAllRaw() throws Exception {
        Collection<Object> values = surfer.collectAll(read("sample.json"), "$..book[1,3]['author','title']");
        assertEquals(4, values.size());
        Iterator<Object> itr = values.iterator();
        itr.next();
        assertEquals("Sword of Honour", itr.next());
    }

    @Test
    public void testCollectOneRaw() throws Exception {
        Object value = surfer.collectOne(read("sample.json"), "$..book[1,3]['author','title']");
        assertEquals("Evelyn Waugh", value);
    }

    @Test
    public void testCollectAll() throws Exception {
        Collection<String> values = surfer.collectAll(read("sample.json"), String.class, JsonPathCompiler.compile("$..book[1,3]['author', 'title']"));
        assertEquals(4, values.size());
        assertEquals("Evelyn Waugh", values.iterator().next());
    }

    // String input (instead of InputStream) overloads.
    @Test
    public void testCollectAllFromString() throws Exception {
        Collection<Object> values = surfer.collectAll(readAsString("sample.json"), "$..book[1,3]['author', 'title']");
        assertEquals(4, values.size());
        assertEquals("Evelyn Waugh", values.iterator().next());
    }

    @Test
    public void testCollectOne() throws Exception {
        String value = surfer.collectOne(read("sample.json"), String.class, JsonPathCompiler.compile("$..book[1,3]['author','title']"));
        assertEquals("Evelyn Waugh", value);
    }

    @Test
    public void testCollectOneFromString() throws Exception {
        Object value = surfer.collectOne(readAsString("sample.json"), "$..book[1,3]['author','title']");
        assertEquals("Evelyn Waugh", value);
    }

    // getCurrentFieldName(): field name for object members, null for array elements.
    @Test
    public void testGetCurrentFieldName() throws Exception {
        surfer.configBuilder()
                .bind("$.store.book[0].title", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(context.getCurrentFieldName(), "title");
                    }
                })
                .bind("$.store.book[0]", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertNull(context.getCurrentFieldName());
                    }
                })
                .bind("$.store.bicycle", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(context.getCurrentFieldName(), "bicycle");
                    }
                })
                .buildAndSurf(read("sample.json"));
    }

    // getCurrentArrayIndex(): element index inside arrays, -1 elsewhere.
    @Test
    public void testGetCurrentArrayIndex() throws Exception {
        surfer.configBuilder()
                .bind("$.store.book[3]", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(context.getCurrentArrayIndex(), 3);
                    }
                })
                .bind("$.store", new JsonPathListener() {
                    @Override
                    public void onValue(Object value, ParsingContext context) {
                        assertEquals(context.getCurrentArrayIndex(), -1);
                    }
                })
                .buildAndSurf(read("sample.json"));
    }

    // Smoke tests for the documented example paths; they only log via `print`.
    @Test
    public void testExample1() throws Exception {
        surfer.configBuilder().bind("$.store.book[*].author", print).buildAndSurf(read("sample.json"));
    }

    @Test
    public void testExample2() throws Exception {
        surfer.configBuilder().bind("$..author", print).buildAndSurf(read("sample.json"));
    }

    @Test
    public void testExample3() throws Exception {
        surfer.configBuilder().bind("$.store.*", print).buildAndSurf(read("sample.json"));
    }

    @Test
    public void testExample4() throws Exception {
        surfer.configBuilder().bind("$.store..price", print).buildAndSurf(read("sample.json"));
    }

    @Test
    public void testExample5() throws Exception {
        surfer.configBuilder().bind("$..book[2]", print).buildAndSurf(read("sample.json"));
    }

    @Test
    public void testExample6() throws Exception {
        surfer.configBuilder().bind("$..book[0,1]", print).buildAndSurf(read("sample.json"));
    }

    // Smoke test: stop() from a listener must terminate parsing cleanly.
    @Test
    public void testStoppable() throws Exception {
        surfer.configBuilder().bind("$..book[0,1]", new JsonPathListener() {
            @Override
            public void onValue(Object value, ParsingContext parsingContext) {
                parsingContext.stop();
                //System.out.println(value);
            }
        }).buildAndSurf(read("sample.json"));
    }

    // A per-configuration provider override: values arrive as plain Java collections.
    @Test
    public void testPlugableProvider() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().withJsonProvider(JavaCollectionProvider.INSTANCE)
                .bind("$.store", mockListener)
                .buildAndSurf(read("sample.json"));
        verify(mockListener).onValue(isA(HashMap.class), any(ParsingContext.class));
    }

    // A suppressing ErrorHandlingStrategy lets parsing continue past listener
    // exceptions: all 4 books are still delivered.
    @Test
    public void testErrorStrategySuppressException() throws Exception {
        JsonPathListener mock = mock(JsonPathListener.class);
        doNothing().doThrow(Exception.class).doThrow(Exception.class).when(mock).onValue(anyObject(), any(ParsingContext.class));
        surfer.configBuilder().bind("$.store.book[*]", mock)
                .withErrorStrategy(new ErrorHandlingStrategy() {
                    @Override
                    public void handleParsingException(Exception e) {
                        // suppress exception
                    }

                    @Override
                    public void handleExceptionFromListener(Exception e, ParsingContext context) {
                        // suppress exception
                    }
                })
                .buildAndSurf(read("sample.json"));
        verify(mock, times(4)).onValue(anyObject(), any(ParsingContext.class));
    }

    // Default strategy: the second listener invocation throws and aborts the
    // parse, so only 2 callbacks happen.
    @Test
    public void testErrorStrategyThrowException() throws Exception {
        JsonPathListener mock = mock(JsonPathListener.class);
        doNothing().doThrow(Exception.class).doThrow(Exception.class).when(mock).onValue(anyObject(), any(ParsingContext.class));
        try {
            surfer.configBuilder().bind("$.store.book[*]", mock).buildAndSurf(read("sample.json"));
        } catch (Exception e) {
            // catch mock exception
        }
        verify(mock, times(2)).onValue(anyObject(), any(ParsingContext.class));
    }

    // collectOne on a path with no match returns null rather than throwing.
    @Test
    public void testCollectOneFoundNothing() throws Exception {
        String jsonPathFoundNothing = "$..authors";
        Object expireNull = surfer.collectOne(read("sample.json"), jsonPathFoundNothing);
        assertNull(expireNull);
    }

    // Filter: regex match operator "=~".
    @Test
    public void testJsonPathFilterMatchRegex() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.isbn=~/\\d-\\d\\d\\d-21311-\\d/)]", mockListener)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("Test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("Moby Dick").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Filter: regex with a flag suffix (case-insensitive here).
    @Test
    public void testJsonPathFilterMatchRegexFlags() throws Exception {
        JsonPathListener mockListener = mock(JsonPathListener.class);
        surfer.configBuilder().bind("$.store.book[?(@.author=~/tolkien/i)]", mockListener) // we assume other flags work too
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener, times(1)).onValue(argThat(new CustomMatcher<Object>("Test filter") {
            @Override
            public boolean matches(Object o) {
                return provider.primitive("The Lord of the Rings").equals(provider.resolve(o, "title"));
            }
        }), any(ParsingContext.class));
    }

    // Several filtered bindings can coexist in one configuration.
    @Test
    public void testJsonPathFilterWithMultipleBinding() throws Exception {
        JsonPathListener mockListener1 = mock(JsonPathListener.class);
        JsonPathListener mockListener2 = mock(JsonPathListener.class);
        JsonPathListener mockListener3 = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$.store.book[0,1]", mockListener1)
                .bind("$.store.book[?(@.author=='Herman Melville')]", mockListener2)
                .bind("$.store.book[?(@.author=='Nigel Rees')]", mockListener3)
                .buildAndSurf(read("sample_filter.json"));
        verify(mockListener1, times(2)).onValue(any(), any(ParsingContext.class));
        verify(mockListener2, times(1)).onValue(any(), any(ParsingContext.class));
        verify(mockListener3, times(1)).onValue(any(), any(ParsingContext.class));
    }

    // A built SurfingConfiguration must be reusable across multiple parses;
    // counts below are cumulative over both input files.
    @Test
    public void testJsonPathFilterWithMultipleBindingAndSharedConfig() throws Exception {
        JsonPathListener mockListener1 = mock(JsonPathListener.class);
        JsonPathListener mockListener2 = mock(JsonPathListener.class);
        JsonPathListener mockListener3 = mock(JsonPathListener.class);
        JsonPathListener mockListener4 = mock(JsonPathListener.class);
        SurfingConfiguration config = surfer.configBuilder()
                .bind("$.store.book[0,1]", mockListener1)
                .bind("$.store.book[?(@.author=='Herman Melville')]", mockListener2)
                .bind("$.store.book[?(@.author=='Nigel Rees')]", mockListener3)
                .bind("$.store.book[?(@.volumes)]", mockListener4)
                .build();
        surfer.surf(read("sample_filter.json"), config);
        surfer.surf(read("sample_filter2.json"), config);
        verify(mockListener1, times(4)).onValue(any(), any(ParsingContext.class));
        verify(mockListener2, times(2)).onValue(any(), any(ParsingContext.class));
        verify(mockListener3, times(3)).onValue(any(), any(ParsingContext.class));
        verify(mockListener4, times(2)).onValue(any(), any(ParsingContext.class));
    }

    // Filtered and unfiltered bindings side by side in one configuration.
    @Test
    public void testMultipleBindingWithAndWithoutFilter() throws Exception {
        JsonPathListener mockListener1 = mock(JsonPathListener.class);
        JsonPathListener mockListener2 = mock(JsonPathListener.class);
        JsonPathListener mockListener3 = mock(JsonPathListener.class);
        surfer.configBuilder()
                .bind("$.store.book[?(@.category == 'reference')]", mockListener1)
                .bind("$.store.bicycle.color", mockListener2)
                .bind("$.store.bicycle", mockListener3)
                .buildAndSurf(read("sample.json"));
        verify(mockListener1, times(1)).onValue(any(), any(ParsingContext.class));
        verify(mockListener2, times(1)).onValue(any(), any(ParsingContext.class));
        verify(mockListener3, times(1)).onValue(any(), any(ParsingContext.class));
    }

    // Collector API: ValueBoxes are empty until exec() runs the parse.
    @Test
    public void testCollector() throws Exception {
        Collector collector = surfer.collector(read("sample.json"));
        ValueBox<String> box1 = collector.collectOne("$.store.book[1].category", String.class);
        ValueBox<Object> box2 = collector.collectOne("$.store.book[2].isbn");
        ValueBox<Collection<Object>> box3 = collector.collectAll("$.store.book[*]");
        assertNull(box1.get());
        assertNull(box2.get());
        assertEquals(0, box3.get().size());
        collector.exec();
        assertEquals("fiction", box1.get());
        assertEquals("0-553-21311-3", box2.get());
        assertEquals(4, box3.get().size());
    }

    // Dot-notation numeric segments ("$.0") address array indices.
    @Test
    public void testArrayIndex() throws Exception {
        Collector collector = surfer.collector(read("array.json"));
        ValueBox<String> box1 = collector.collectOne("$.0", String.class);
        ValueBox<Boolean> box2 = collector.collectOne("$.2", Boolean.class);
        collector.exec();
        assertEquals("abc", box1.get());
        assertTrue(box2.get());
    }

    // Double-quoted filter literal containing dots and spaces.
    @Test
    public void testFilterWithDoubleQuote() throws Exception {
        Collector collector = surfer.collector(read("sample.json"));
        ValueBox<String> box1 = collector.collectOne("$.store.book[?(@.author==\"J. R. R. Tolkien\")].title", String.class);
        collector.exec();
        assertEquals("The Lord of the Rings", box1.get());
    }
}
/* * Licensed to GraphHopper and Peter Karich under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.reader; import static org.junit.Assert.*; import gnu.trove.list.TLongList; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.junit.After; import org.junit.Before; import org.junit.Test; import com.graphhopper.GraphHopper; import com.graphhopper.reader.dem.ElevationProvider; import com.graphhopper.reader.dem.SRTMProvider; import com.graphhopper.routing.util.*; import com.graphhopper.storage.*; import com.graphhopper.util.EdgeExplorer; import com.graphhopper.util.EdgeIterator; import com.graphhopper.util.EdgeIteratorState; import com.graphhopper.util.GHUtility; import com.graphhopper.util.Helper; import com.graphhopper.util.shapes.GHPoint; import java.util.*; /** * Tests the OSMReader with the normal helper initialized. 
* <p/>
 * @author Peter Karich
 */
public class OSMReaderTest
{
    // OSM fixture files resolved from the test classpath
    private final String file1 = "test-osm.xml";
    private final String file2 = "test-osm2.xml";
    private final String file3 = "test-osm3.xml";
    private final String file4 = "test-osm4.xml";
    private final String fileNegIds = "test-osm-negative-ids.xml";
    private final String fileBarriers = "test-barriers.xml";
    private final String fileTurnRestrictions = "test-restrictions.xml";
    // scratch directory, created in setUp and removed in tearDown
    private final String dir = "./target/tmp/test-db";
    // encoders are (re)assigned by the GraphHopperTest constructor below
    private CarFlagEncoder carEncoder;
    private BikeFlagEncoder bikeEncoder;
    private FlagEncoder footEncoder;
    // explorers are initialized as a side effect of GraphHopperTest.importData()
    private EdgeExplorer carOutExplorer;
    private EdgeExplorer carAllExplorer;

    @Before
    public void setUp()
    {
        new File(dir).mkdirs();
    }

    @After
    public void tearDown()
    {
        Helper.removeDir(new File(dir));
    }

    /**
     * Creates a non-CH, in-memory storage; attaches a TurnCostExtension only when
     * turn restriction import is requested.
     */
    GraphHopperStorage newGraph( String directory, EncodingManager encodingManager, boolean is3D, boolean turnRestrictionsImport )
    {
        boolean ch = false;
        return new GraphHopperStorage(ch, new RAMDirectory(directory, false), encodingManager, is3D,
                turnRestrictionsImport ? new TurnCostExtension() : new GraphExtension.NoOpExtension());
    }

    /**
     * GraphHopper test harness: imports an OSM classpath resource into a fresh in-memory
     * graph and captures the encoders/explorers used by the assertions in this class.
     */
    class GraphHopperTest extends GraphHopper
    {
        public GraphHopperTest( String osmFile )
        {
            this(osmFile, false);
        }

        public GraphHopperTest( String osmFile, boolean turnCosts )
        {
            setStoreOnFlush(false);
            setOSMFile(osmFile);
            setGraphHopperLocation(dir);
            // NOTE(review): this encoding manager is immediately replaced by the
            // footEncoder/carEncoder/bikeEncoder manager set below — looks redundant; confirm.
            setEncodingManager(new EncodingManager("CAR,FOOT"));
            setCHEnable(false);
            if (turnCosts)
            {
                // encoders with turn-cost bits enabled (last constructor argument)
                carEncoder = new CarFlagEncoder(5, 5, 3);
                bikeEncoder = new BikeFlagEncoder(4, 2, 3);
            } else
            {
                carEncoder = new CarFlagEncoder();
                bikeEncoder = new BikeFlagEncoder();
            }
            footEncoder = new FootFlagEncoder();
            setEncodingManager(new EncodingManager(footEncoder, carEncoder, bikeEncoder));
        }

        @Override
        protected DataReader createReader( GraphHopperStorage tmpGraph )
        {
            return initOSMReader(new OSMReader(tmpGraph));
        }

        @Override
        protected DataReader importData() throws IOException
        {
            GraphHopperStorage tmpGraph = newGraph(dir, getEncodingManager(), hasElevation(),
                    getEncodingManager().needsTurnCostsSupport());
            setGraphHopperStorage(tmpGraph);
            DataReader osmReader = createReader(tmpGraph);
            try
            {
                // resolve the OSM file from the test classpath
                ((OSMReader) osmReader).setOSMFile(new File(getClass().getResource(getOSMFile()).toURI()));
            } catch (URISyntaxException e)
            {
                throw new RuntimeException(e);
            }
            osmReader.readGraph();
            // refresh the explorers used by the enclosing test's assertions
            carOutExplorer = getGraphHopperStorage().createEdgeExplorer(new DefaultEdgeFilter(carEncoder, false, true));
            carAllExplorer = getGraphHopperStorage().createEdgeExplorer(new DefaultEdgeFilter(carEncoder, true, true));
            return osmReader;
        }
    }

    InputStream getResource( String file )
    {
        return getClass().getResourceAsStream(file);
    }

    // End-to-end import of file1: node count, adjacency, edge names, distances and
    // location-index lookups.
    @Test
    public void testMain()
    {
        GraphHopper hopper = new GraphHopperTest(file1).importOrLoad();
        GraphHopperStorage graph = hopper.getGraphHopperStorage();

        assertNotNull(graph.getProperties().get("osmreader.import.date"));
        assertNotEquals("", graph.getProperties().get("osmreader.import.date"));

        assertEquals(4, graph.getNodes());
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52);
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51.2492152);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 51.2);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 49);
        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n10)));
        assertEquals(3, GHUtility.count(carOutExplorer.setBaseNode(n20)));
        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n30)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertEquals("street 123, B 122", iter.getName());
        assertEquals(n50, iter.getAdjNode());
        AbstractGraphStorageTester.assertPList(Helper.createPointList(51.25, 9.43), iter.fetchWayGeometry(0));
        FlagEncoder flags = carEncoder;
        assertTrue(flags.isForward(iter.getFlags()));
        assertTrue(flags.isBackward(iter.getFlags()));

        assertTrue(iter.next());
        assertEquals("route 666", iter.getName());
        assertEquals(n30, iter.getAdjNode());
        assertEquals(93147, iter.getDistance(), 1);

        assertTrue(iter.next());
        assertEquals("route 666", iter.getName());
        assertEquals(n10, iter.getAdjNode());
        assertEquals(88643, iter.getDistance(), 1);
        assertTrue(flags.isForward(iter.getFlags()));
        assertTrue(flags.isBackward(iter.getFlags()));
        assertFalse(iter.next());

        // get third added location id=30
        iter = carOutExplorer.setBaseNode(n30);
        assertTrue(iter.next());
        assertEquals("route 666", iter.getName());
        assertEquals(n20, iter.getAdjNode());
        assertEquals(93146.888, iter.getDistance(), 1);

        NodeAccess na = graph.getNodeAccess();
        assertEquals(9.4, na.getLongitude(hopper.getLocationIndex().findID(51.2, 9.4)), 1e-3);
        assertEquals(10, na.getLongitude(hopper.getLocationIndex().findID(49, 10)), 1e-3);
        assertEquals(51.249, na.getLatitude(hopper.getLocationIndex().findID(51.2492152, 9.4317166)), 1e-3);

        // node 40 is on the way between 30 and 50 => 9.0
        assertEquals(9, na.getLongitude(hopper.getLocationIndex().findID(51.25, 9.43)), 1e-3);
    }

    // Location index lookups must still resolve after the graph is sorted on import.
    @Test
    public void testSort()
    {
        GraphHopper hopper = new GraphHopperTest(file1).setSortGraph(true).importOrLoad();
        NodeAccess na = hopper.getGraphHopperStorage().getNodeAccess();
        assertEquals(10, na.getLongitude(hopper.getLocationIndex().findID(49, 10)), 1e-3);
        assertEquals(51.249, na.getLatitude(hopper.getLocationIndex().findID(51.2492152, 9.4317166)), 1e-3);
    }

    // Custom isInBounds filter: nodes outside lat>49 && lon>8 are dropped during import.
    @Test
    public void testWithBounds()
    {
        GraphHopper hopper = new GraphHopperTest(file1)
        {
            @Override
            protected DataReader createReader( GraphHopperStorage tmpGraph )
            {
                return new OSMReader(tmpGraph)
                {
                    @Override
                    public boolean isInBounds( OSMNode node )
                    {
                        return node.getLat() > 49 && node.getLon() > 8;
                    }
                }.setEncodingManager(getEncodingManager());
            }
        };
        hopper.importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();
        assertEquals(4, graph.getNodes());
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51.2492152);
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 51.2);
        int n40 = AbstractGraphStorageTester.getIdOf(graph, 51.25);

        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n10)));
        assertEquals(3, GHUtility.count(carOutExplorer.setBaseNode(n20)));
        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n30)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertEquals(n40, iter.getAdjNode());
        AbstractGraphStorageTester.assertPList(Helper.createPointList(), iter.fetchWayGeometry(0));
        assertTrue(iter.next());
        assertEquals(n30, iter.getAdjNode());
        assertEquals(93146.888, iter.getDistance(), 1);
        assertTrue(iter.next());
        AbstractGraphStorageTester.assertPList(Helper.createPointList(), iter.fetchWayGeometry(0));
        assertEquals(n10, iter.getAdjNode());
        assertEquals(88643, iter.getDistance(), 1);

        // get third added location => 2
        iter = carOutExplorer.setBaseNode(n30);
        assertTrue(iter.next());
        assertEquals(n20, iter.getAdjNode());
        assertEquals(93146.888, iter.getDistance(), 1);
        assertFalse(iter.next());
    }

    // One-way ways must produce forward-only (or backward-only) car flags.
    @Test
    public void testOneWay()
    {
        GraphHopper hopper = new GraphHopperTest(file2).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();

        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52.0);
        int n22 = AbstractGraphStorageTester.getIdOf(graph, 52.133);
        int n23 = AbstractGraphStorageTester.getIdOf(graph, 52.144);
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51.2492152);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 51.2);
        assertEquals(1, GHUtility.count(carOutExplorer.setBaseNode(n10)));
        assertEquals(2, GHUtility.count(carOutExplorer.setBaseNode(n20)));
        assertEquals(0, GHUtility.count(carOutExplorer.setBaseNode(n30)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertTrue(iter.next());
        assertEquals(n30, iter.getAdjNode());

        FlagEncoder encoder = carEncoder;
        iter = carAllExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertEquals(n23, iter.getAdjNode());
        assertTrue(encoder.isForward(iter.getFlags()));
        assertFalse(encoder.isBackward(iter.getFlags()));

        assertTrue(iter.next());
        assertEquals(n22, iter.getAdjNode());
        assertFalse(encoder.isForward(iter.getFlags()));
        assertTrue(encoder.isBackward(iter.getFlags()));

        assertTrue(iter.next());
        assertFalse(encoder.isForward(iter.getFlags()));
        assertTrue(encoder.isBackward(iter.getFlags()));

        assertTrue(iter.next());
        assertEquals(n30, iter.getAdjNode());
        assertTrue(encoder.isForward(iter.getFlags()));
        assertFalse(encoder.isBackward(iter.getFlags()));

        assertTrue(iter.next());
        assertEquals(n10, iter.getAdjNode());
        assertFalse(encoder.isForward(iter.getFlags()));
        assertTrue(encoder.isBackward(iter.getFlags()));
    }

    // Ferry speed: default slow speed without a duration tag, duration-derived speed with one.
    @Test
    public void testFerry()
    {
        GraphHopper hopper = new GraphHopperTest(file2)
        {
            @Override
            public void cleanUp()
            {
            }
        }.importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();

        int n40 = AbstractGraphStorageTester.getIdOf(graph, 54.0);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 55.0);
        assertEquals(GHUtility.asSet(n40), GHUtility.getNeighbors(carAllExplorer.setBaseNode(n50)));

        // no duration is given => slow speed only!
        int n80 = AbstractGraphStorageTester.getIdOf(graph, 54.1);
        EdgeIterator iter = carOutExplorer.setBaseNode(n80);
        iter.next();
        assertEquals(5, carEncoder.getSpeed(iter.getFlags()), 1e-1);

        // duration 01:10 is given => more precise speed calculation!
        // ~111km (from 54.0,10.1 to 55.0,10.2) in duration=70 minutes => 95km/h => / 1.4 => 71km/h
        iter = carOutExplorer.setBaseNode(n40);
        iter.next();
        assertEquals(70, carEncoder.getSpeed(iter.getFlags()), 1e-1);
    }

    // maxspeed tag must cap the encoded car speed.
    @Test
    public void testMaxSpeed()
    {
        GraphHopper hopper = new GraphHopperTest(file2)
        {
            @Override
            public void cleanUp()
            {
            }
        }.importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();

        int n60 = AbstractGraphStorageTester.getIdOf(graph, 56.0);
        EdgeIterator iter = carOutExplorer.setBaseNode(n60);
        iter.next();
        assertEquals(35, carEncoder.getSpeed(iter.getFlags()), 1e-1);
    }

    // Ways referencing node ids missing from the file must not break the import.
    @Test
    public void testWayReferencesNotExistingAdjNode()
    {
        GraphHopper hopper = new GraphHopperTest(file4).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();

        assertEquals(2, graph.getNodes());
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51.2492152);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 51.2);

        assertEquals(GHUtility.asSet(n30), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n10)));
    }

    // Foot-only ways are visible to the foot explorer but not to the car explorers.
    @Test
    public void testFoot()
    {
        GraphHopper hopper = new GraphHopperTest(file3).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();

        int n10 = AbstractGraphStorageTester.getIdOf(graph, 11.1);
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 12);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 11.2);
        int n40 = AbstractGraphStorageTester.getIdOf(graph, 11.3);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 10);

        assertEquals(GHUtility.asSet(n20, n40), GHUtility.getNeighbors(carAllExplorer.setBaseNode(n10)));
        assertEquals(GHUtility.asSet(), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n30)));
        assertEquals(GHUtility.asSet(n10, n30, n40), GHUtility.getNeighbors(carAllExplorer.setBaseNode(n20)));
        assertEquals(GHUtility.asSet(n30, n40), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n20)));

        EdgeExplorer footOutExplorer = graph.createEdgeExplorer(new DefaultEdgeFilter(footEncoder, false, true));
        assertEquals(GHUtility.asSet(n20, n50), GHUtility.getNeighbors(footOutExplorer.setBaseNode(n10)));
        assertEquals(GHUtility.asSet(n20, n50), GHUtility.getNeighbors(footOutExplorer.setBaseNode(n30)));
        assertEquals(GHUtility.asSet(n10, n30), GHUtility.getNeighbors(footOutExplorer.setBaseNode(n20)));
    }

    // Negative OSM ids (common in JOSM exports) must import like positive ones.
    @Test
    public void testNegativeIds()
    {
        GraphHopper hopper = new GraphHopperTest(fileNegIds).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();
        assertEquals(4, graph.getNodes());
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52);
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51.2492152);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 51.2);
        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n10)));
        assertEquals(3, GHUtility.count(carOutExplorer.setBaseNode(n20)));
        assertEquals(GHUtility.asSet(n20), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n30)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertTrue(iter.next());
        assertEquals(n30, iter.getAdjNode());
        assertEquals(93147, iter.getDistance(), 1);

        assertTrue(iter.next());
        assertEquals(n10, iter.getAdjNode());
        assertEquals(88643, iter.getDistance(), 1);
    }

    // Barrier nodes on a way must split it: a duplicate node at the same coordinates
    // is introduced and the two halves are disconnected for cars.
    @Test
    public void testBarriers()
    {
        GraphHopper hopper = new GraphHopperTest(fileBarriers).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();
        assertEquals(8, graph.getNodes());

        int n10 = AbstractGraphStorageTester.getIdOf(graph, 51);
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 53);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 55);

        // separate id
        int new20 = 4;
        assertNotEquals(n20, new20);
        NodeAccess na = graph.getNodeAccess();
        // the duplicate shares its coordinates with the original node
        assertEquals(na.getLatitude(n20), na.getLatitude(new20), 1e-5);
        assertEquals(na.getLongitude(n20), na.getLongitude(new20), 1e-5);

        assertEquals(n20, hopper.getLocationIndex().findID(52, 9.4));

        assertEquals(GHUtility.asSet(n20, n30), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n10)));
        assertEquals(GHUtility.asSet(new20, n10, n50), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n30)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n20);
        assertTrue(iter.next());
        assertEquals(n10, iter.getAdjNode());
        assertFalse(iter.next());

        iter = carOutExplorer.setBaseNode(new20);
        assertTrue(iter.next());
        assertEquals(n30, iter.getAdjNode());
        assertFalse(iter.next());
    }

    // A barrier on a tower node also splits the graph via a duplicated node.
    @Test
    public void testBarriersOnTowerNodes()
    {
        GraphHopper hopper = new GraphHopperTest(fileBarriers).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();
        assertEquals(8, graph.getNodes());

        int n60 = AbstractGraphStorageTester.getIdOf(graph, 56);
        int newId = 5;

        assertEquals(GHUtility.asSet(newId), GHUtility.getNeighbors(carOutExplorer.setBaseNode(n60)));

        EdgeIterator iter = carOutExplorer.setBaseNode(n60);
        assertTrue(iter.next());
        assertEquals(newId, iter.getAdjNode());
        assertFalse(iter.next());

        iter = carOutExplorer.setBaseNode(newId);
        assertTrue(iter.next());
        assertEquals(n60, iter.getAdjNode());
        assertFalse(iter.next());
    }

    // Relation flags: lower-priority networks must not overwrite stored flags,
    // higher-priority ones must.
    @Test
    public void testRelation()
    {
        EncodingManager manager = new EncodingManager("bike");
        GraphHopperStorage ghStorage = new GraphHopperStorage(new RAMDirectory(), manager, false);
        OSMReader reader = new OSMReader(ghStorage).setEncodingManager(manager);
        OSMRelation osmRel = new OSMRelation(1);
        osmRel.getMembers().add(new OSMRelation.Member(OSMRelation.WAY, 1, ""));
        osmRel.getMembers().add(new OSMRelation.Member(OSMRelation.WAY, 2, ""));

        osmRel.setTag("route", "bicycle");
        osmRel.setTag("network", "lcn");
        reader.prepareWaysWithRelationInfo(osmRel);

        long flags = reader.getRelFlagsMap().get(1);
        assertTrue(flags != 0);

        // do NOT overwrite with UNCHANGED
        osmRel.setTag("network", "mtb");
        reader.prepareWaysWithRelationInfo(osmRel);
        long flags2 = reader.getRelFlagsMap().get(1);
        assertEquals(flags, flags2);

        // overwrite with outstanding
        osmRel.setTag("network", "ncn");
        reader.prepareWaysWithRelationInfo(osmRel);
        long flags3 = reader.getRelFlagsMap().get(1);
        assertTrue(flags != flags3);
    }

    // Turn restriction import: restricted turns carry a positive car turn cost,
    // unrestricted turns do not; bike restrictions are tracked independently.
    @Test
    public void testTurnRestrictions()
    {
        GraphHopper hopper = new GraphHopperTest(fileTurnRestrictions, true).importOrLoad();
        Graph graph = hopper.getGraphHopperStorage();
        assertEquals(15, graph.getNodes());
        assertTrue(graph.getExtension() instanceof TurnCostExtension);
        TurnCostExtension tcStorage = (TurnCostExtension) graph.getExtension();

        int n1 = AbstractGraphStorageTester.getIdOf(graph, 50, 10);
        int n2 = AbstractGraphStorageTester.getIdOf(graph, 52, 10);
        int n3 = AbstractGraphStorageTester.getIdOf(graph, 52, 11);
        int n4 = AbstractGraphStorageTester.getIdOf(graph, 52, 12);
        int n5 = AbstractGraphStorageTester.getIdOf(graph, 50, 12);
        int n6 = AbstractGraphStorageTester.getIdOf(graph, 51, 11);
        int n8 = AbstractGraphStorageTester.getIdOf(graph, 54, 11);

        int edge1_6 = GHUtility.getEdge(graph, n1, n6).getEdge();
        int edge2_3 = GHUtility.getEdge(graph, n2, n3).getEdge();
        int edge3_4 = GHUtility.getEdge(graph, n3, n4).getEdge();
        int edge3_8 = GHUtility.getEdge(graph, n3, n8).getEdge();

        int edge3_2 = GHUtility.getEdge(graph, n3, n2).getEdge();
        int edge4_3 = GHUtility.getEdge(graph, n4, n3).getEdge();
        int edge8_3 = GHUtility.getEdge(graph, n8, n3).getEdge();

        // (2-3)->(3-4) only_straight_on = (2-3)->(3-8) restricted
        // (4-3)->(3-8) no_right_turn = (4-3)->(3-8) restricted
        assertTrue(carEncoder.getTurnCost(tcStorage.getTurnCostFlags(edge2_3, n3, edge3_8)) > 0);
        assertTrue(carEncoder.getTurnCost(tcStorage.getTurnCostFlags(edge4_3, n3, edge3_8)) > 0);
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge2_3, n3, edge3_4)));
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge2_3, n3, edge3_2)));
        // NOTE(review): the next line duplicates the (edge2_3, n3, edge3_4) assertion above —
        // possibly a copy-paste; a distinct combination may have been intended. Confirm.
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge2_3, n3, edge3_4)));
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge4_3, n3, edge3_2)));
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge8_3, n3, edge3_2)));

        // u-turn restriction for (6-1)->(1-6) but not for (1-6)->(6-1)
        assertTrue(carEncoder.getTurnCost(tcStorage.getTurnCostFlags(edge1_6, n1, edge1_6)) > 0);
        assertFalse(carEncoder.isTurnRestricted(tcStorage.getTurnCostFlags(edge1_6, n6, edge1_6)));

        int edge4_5 = GHUtility.getEdge(graph, n4, n5).getEdge();
        int edge5_6 = GHUtility.getEdge(graph, n5, n6).getEdge();
        int edge5_1 = GHUtility.getEdge(graph, n5, n1).getEdge();

        // (4-5)->(5-1) right_turn_only = (4-5)->(5-6) restricted
        long costsFlags = tcStorage.getTurnCostFlags(edge4_5, n5, edge5_6);
        assertFalse(carEncoder.isTurnRestricted(costsFlags));
        assertTrue(carEncoder.getTurnCost(tcStorage.getTurnCostFlags(edge4_5, n5, edge5_1)) > 0);

        // for bike
        assertFalse(bikeEncoder.isTurnRestricted(costsFlags));

        int n10 = AbstractGraphStorageTester.getIdOf(graph, 40, 10);
        int n11 = AbstractGraphStorageTester.getIdOf(graph, 40, 11);
        int n14 = AbstractGraphStorageTester.getIdOf(graph, 39, 11);

        int edge10_11 = GHUtility.getEdge(graph, n10, n11).getEdge();
        int edge11_14 = GHUtility.getEdge(graph, n11, n14).getEdge();

        assertEquals(0, tcStorage.getTurnCostFlags(edge11_14, n11, edge10_11));

        costsFlags = tcStorage.getTurnCostFlags(edge10_11, n11, edge11_14);
        assertFalse(carEncoder.isTurnRestricted(costsFlags));
        assertTrue(bikeEncoder.isTurnRestricted(costsFlags));
    }

    // processWay must attach estimated_center / estimated_distance tags computed
    // from the way's (mocked) node coordinates.
    @Test
    public void testEstimatedCenter()
    {
        final CarFlagEncoder encoder = new CarFlagEncoder()
        {
            private EncodedValue objectEncoder;

            @Override
            public int defineNodeBits( int index, int shift )
            {
                shift = super.defineNodeBits(index, shift);
                objectEncoder = new EncodedValue("oEnc", shift, 2, 1, 0, 3, true);
                return shift + 2;
            }

            @Override
            public long handleNodeTags( OSMNode node )
            {
                if (node.hasTag("test", "now"))
                    return -objectEncoder.setValue(0, 1);
                return 0;
            }
        };
        EncodingManager manager = new EncodingManager(encoder);
        GraphHopperStorage ghStorage = newGraph(dir, manager, false, false);
        final Map<Integer, Double> latMap = new HashMap<Integer, Double>();
        final Map<Integer, Double> lonMap = new HashMap<Integer, Double>();
        latMap.put(1, 1.1d);
        latMap.put(2, 1.2d);
        lonMap.put(1, 1.0d);
        lonMap.put(2, 1.0d);

        // NOTE(review): 'increased' is never used below — dead local? Confirm before removing.
        final AtomicInteger increased = new AtomicInteger(0);
        OSMReader osmreader = new OSMReader(ghStorage)
        {
            // mock data access
            @Override
            double getTmpLatitude( int id )
            {
                return latMap.get(id);
            }

            @Override
            double getTmpLongitude( int id )
            {
                return lonMap.get(id);
            }

            @Override
            Collection<EdgeIteratorState> addOSMWay( TLongList osmNodeIds, long wayFlags, long osmId )
            {
                return Collections.emptyList();
            }
        };
        osmreader.setEncodingManager(manager);

        // save some node tags for first node
        OSMNode osmNode = new OSMNode(1, 1.1d, 1.0d);
        osmNode.setTag("test", "now");
        osmreader.getNodeFlagsMap().put(1, encoder.handleNodeTags(osmNode));

        OSMWay way = new OSMWay(1L);
        way.getNodes().add(1);
        way.getNodes().add(2);
        way.setTag("highway", "motorway");
        osmreader.getNodeMap().put(1, 1);
        osmreader.getNodeMap().put(2, 2);
        osmreader.processWay(way);

        GHPoint p = way.getTag("estimated_center", null);
        assertEquals(1.15, p.lat, 1e-3);
        assertEquals(1.0, p.lon, 1e-3);
        Double d = way.getTag("estimated_distance", null);
        assertEquals(11119.5, d, 1e-1);
    }

    // Elevation from the OSM file itself via an overridden getElevation().
    @Test
    public void testReadEleFromCustomOSM()
    {
        GraphHopper hopper = new GraphHopperTest("custom-osm-ele.xml")
        {
            @Override
            protected DataReader createReader( GraphHopperStorage tmpGraph )
            {
                return initOSMReader(new OSMReader(tmpGraph)
                {
                    @Override
                    protected double getElevation( OSMNode node )
                    {
                        return node.getEle();
                    }
                });
            }
        }.setElevation(true).importOrLoad();

        Graph graph = hopper.getGraphHopperStorage();
        int n20 = AbstractGraphStorageTester.getIdOf(graph, 52);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 49);

        EdgeIteratorState edge = GHUtility.getEdge(graph, n20, n50);
        assertEquals(Helper.createPointList3D(52, 9, -10, 51.25, 9.43, 100, 49, 10, -30), edge.fetchWayGeometry(3));
    }

    // Elevation from an external SRTM provider reading cached .hgt tiles from ./files.
    @Test
    public void testReadEleFromDataProvider()
    {
        GraphHopper hopper = new GraphHopperTest("test-osm5.xml");
        // get N10E046.hgt.zip
        ElevationProvider provider = new SRTMProvider();
        provider.setCacheDir(new File("./files"));
        hopper.setElevationProvider(provider);
        hopper.importOrLoad();

        Graph graph = hopper.getGraphHopperStorage();
        int n10 = AbstractGraphStorageTester.getIdOf(graph, 49.501);
        int n30 = AbstractGraphStorageTester.getIdOf(graph, 49.5011);
        int n50 = AbstractGraphStorageTester.getIdOf(graph, 49.5001);

        EdgeIteratorState edge = GHUtility.getEdge(graph, n50, n30);
        assertEquals(Helper.createPointList3D(49.5001, 11.501, 426, 49.5002, 11.5015, 441, 49.5011, 11.502, 410.0),
                edge.fetchWayGeometry(3));

        edge = GHUtility.getEdge(graph, n10, n50);
        assertEquals(Helper.createPointList3D(49.501, 11.5001, 383.0, 49.5001, 11.501, 426.0), edge.fetchWayGeometry(3));
    }

    /**
     * Tests the combination of different turn cost flags by different encoders.
     */
    @Test
    public void testTurnFlagCombination()
    {
        final OSMTurnRelation.TurnCostTableEntry turnCostEntry_car = new OSMTurnRelation.TurnCostTableEntry();
        final OSMTurnRelation.TurnCostTableEntry turnCostEntry_foot = new OSMTurnRelation.TurnCostTableEntry();
        final OSMTurnRelation.TurnCostTableEntry turnCostEntry_bike = new OSMTurnRelation.TurnCostTableEntry();

        CarFlagEncoder car = new CarFlagEncoder(5, 5, 24);
        FootFlagEncoder foot = new FootFlagEncoder();
        BikeFlagEncoder bike = new BikeFlagEncoder(4, 2, 24);
        EncodingManager manager = new EncodingManager(Arrays.asList(bike, foot, car), 4);
        GraphHopperStorage ghStorage = new GraphBuilder(manager).create();
        OSMReader reader = new OSMReader(ghStorage)
        {
            @Override
            public Collection<OSMTurnRelation.TurnCostTableEntry> analyzeTurnRelation( FlagEncoder encoder,
                    OSMTurnRelation turnRelation )
            {
                // simulate by returning one turn cost entry directly
                if (encoder.toString().equalsIgnoreCase("car"))
                {
                    return Collections.singleton(turnCostEntry_car);
                } else if (encoder.toString().equalsIgnoreCase("foot"))
                {
                    return Collections.singleton(turnCostEntry_foot);
                } else if (encoder.toString().equalsIgnoreCase("bike"))
                {
                    return Collections.singleton(turnCostEntry_bike);
                } else
                {
                    throw new IllegalArgumentException("illegal encoder " + encoder.toString());
                }
            }
        }.setEncodingManager(manager);

        // turn cost entries for car and foot are for the same relations (same viaNode, edgeFrom and edgeTo),
        // turn cost entry for bike is for another relation (different viaNode)
        turnCostEntry_car.edgeFrom = 1;
        turnCostEntry_foot.edgeFrom = 1;
        turnCostEntry_bike.edgeFrom = 2;

        // calculating arbitrary flags using the encoders
        turnCostEntry_car.flags = car.getTurnFlags(true, 0);
        turnCostEntry_foot.flags = foot.getTurnFlags(true, 0);
        turnCostEntry_bike.flags = bike.getTurnFlags(false, 10);

        // we expect two different entries: the first one is a combination of turn flags of car and foot,
        // since they provide the same relation, the other one is for bike only
        long assertFlag1 = turnCostEntry_car.flags | turnCostEntry_foot.flags;
        long assertFlag2 = turnCostEntry_bike.flags;

        // combine flags of all encoders
        Collection<OSMTurnRelation.TurnCostTableEntry> entries = reader.analyzeTurnRelation(null);

        // we expect two different turnCost entries
        assertEquals(2, entries.size());

        for (OSMTurnRelation.TurnCostTableEntry entry : entries)
        {
            if (entry.edgeFrom == 1)
            {
                // the first entry provides turn flags for car and foot only
                assertEquals(assertFlag1, entry.flags);
                assertTrue(car.isTurnRestricted(entry.flags));
                assertFalse(foot.isTurnRestricted(entry.flags));
                assertFalse(bike.isTurnRestricted(entry.flags));

                assertTrue(Double.isInfinite(car.getTurnCost(entry.flags)));
                assertEquals(0, foot.getTurnCost(entry.flags), 1e-1);
                assertEquals(0, bike.getTurnCost(entry.flags), 1e-1);
            } else if (entry.edgeFrom == 2)
            {
                // the 2nd entry provides turn flags for bike only
                assertEquals(assertFlag2, entry.flags);
                assertFalse(car.isTurnRestricted(entry.flags));
                assertFalse(foot.isTurnRestricted(entry.flags));
                assertFalse(bike.isTurnRestricted(entry.flags));

                assertEquals(0, car.getTurnCost(entry.flags), 1e-1);
                assertEquals(0, foot.getTurnCost(entry.flags), 1e-1);
                assertEquals(10, bike.getTurnCost(entry.flags), 1e-1);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // -------------------------------------------------------------- // THIS IS A GENERATED SOURCE FILE. DO NOT EDIT! // GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator. // -------------------------------------------------------------- package org.apache.flink.api.java.tuple; import org.apache.flink.annotation.Public; import org.apache.flink.util.StringUtils; /** * A tuple with 9 fields. Tuples are strongly typed; each field may be of a separate type. The * fields of the tuple can be accessed directly as public fields (f0, f1, ...) or via their position * through the {@link #getField(int)} method. The tuple field positions start at zero. * * <p>Tuples are mutable types, meaning that their fields can be re-assigned. This allows functions * that work with Tuples to reuse objects in order to reduce pressure on the garbage collector. * * <p>Warning: If you subclass Tuple9, then be sure to either * * <ul> * <li>not add any new fields, or * <li>make it a POJO, and always declare the element type of your DataStreams/DataSets to your * descendant type. 
(That is, if you have a "class Foo extends Tuple9", then don't use
 *       instances of Foo in a DataStream&lt;Tuple9&gt; / DataSet&lt;Tuple9&gt;, but declare it as
 *       DataStream&lt;Foo&gt; / DataSet&lt;Foo&gt;.)
 * </ul>
 *
 * @see Tuple
 * @param <T0> The type of field 0
 * @param <T1> The type of field 1
 * @param <T2> The type of field 2
 * @param <T3> The type of field 3
 * @param <T4> The type of field 4
 * @param <T5> The type of field 5
 * @param <T6> The type of field 6
 * @param <T7> The type of field 7
 * @param <T8> The type of field 8
 */
@Public
public class Tuple9<T0, T1, T2, T3, T4, T5, T6, T7, T8> extends Tuple {

    private static final long serialVersionUID = 1L;

    /** Field 0 of the tuple. */
    public T0 f0;
    /** Field 1 of the tuple. */
    public T1 f1;
    /** Field 2 of the tuple. */
    public T2 f2;
    /** Field 3 of the tuple. */
    public T3 f3;
    /** Field 4 of the tuple. */
    public T4 f4;
    /** Field 5 of the tuple. */
    public T5 f5;
    /** Field 6 of the tuple. */
    public T6 f6;
    /** Field 7 of the tuple. */
    public T7 f7;
    /** Field 8 of the tuple. */
    public T8 f8;

    /** Creates a new tuple where all fields are null. */
    public Tuple9() {}

    /**
     * Creates a new tuple and assigns the given values to the tuple's fields.
     *
     * @param value0 The value for field 0
     * @param value1 The value for field 1
     * @param value2 The value for field 2
     * @param value3 The value for field 3
     * @param value4 The value for field 4
     * @param value5 The value for field 5
     * @param value6 The value for field 6
     * @param value7 The value for field 7
     * @param value8 The value for field 8
     */
    public Tuple9(
            T0 value0,
            T1 value1,
            T2 value2,
            T3 value3,
            T4 value4,
            T5 value5,
            T6 value6,
            T7 value7,
            T8 value8) {
        this.f0 = value0;
        this.f1 = value1;
        this.f2 = value2;
        this.f3 = value3;
        this.f4 = value4;
        this.f5 = value5;
        this.f6 = value6;
        this.f7 = value7;
        this.f8 = value8;
    }

    @Override
    public int getArity() {
        return 9;
    }

    // Positional field access is inherently untyped; the cast to T is the caller's contract.
    @Override
    @SuppressWarnings("unchecked")
    public <T> T getField(int pos) {
        switch (pos) {
            case 0:
                return (T) this.f0;
            case 1:
                return (T) this.f1;
            case 2:
                return (T) this.f2;
            case 3:
                return (T) this.f3;
            case 4:
                return (T) this.f4;
            case 5:
                return (T) this.f5;
            case 6:
                return (T) this.f6;
            case 7:
                return (T) this.f7;
            case 8:
                return (T) this.f8;
            default:
                throw new IndexOutOfBoundsException(String.valueOf(pos));
        }
    }

    // Symmetric to getField: unchecked casts are unavoidable for positional mutation.
    @Override
    @SuppressWarnings("unchecked")
    public <T> void setField(T value, int pos) {
        switch (pos) {
            case 0:
                this.f0 = (T0) value;
                break;
            case 1:
                this.f1 = (T1) value;
                break;
            case 2:
                this.f2 = (T2) value;
                break;
            case 3:
                this.f3 = (T3) value;
                break;
            case 4:
                this.f4 = (T4) value;
                break;
            case 5:
                this.f5 = (T5) value;
                break;
            case 6:
                this.f6 = (T6) value;
                break;
            case 7:
                this.f7 = (T7) value;
                break;
            case 8:
                this.f8 = (T8) value;
                break;
            default:
                throw new IndexOutOfBoundsException(String.valueOf(pos));
        }
    }

    /**
     * Sets new values to all fields of the tuple.
     *
     * @param value0 The value for field 0
     * @param value1 The value for field 1
     * @param value2 The value for field 2
     * @param value3 The value for field 3
     * @param value4 The value for field 4
     * @param value5 The value for field 5
     * @param value6 The value for field 6
     * @param value7 The value for field 7
     * @param value8 The value for field 8
     */
    public void setFields(
            T0 value0,
            T1 value1,
            T2 value2,
            T3 value3,
            T4 value4,
            T5 value5,
            T6 value6,
            T7 value7,
            T8 value8) {
        this.f0 = value0;
        this.f1 = value1;
        this.f2 = value2;
        this.f3 = value3;
        this.f4 = value4;
        this.f5 = value5;
        this.f6 = value6;
        this.f7 = value7;
        this.f8 = value8;
    }

    // -------------------------------------------------------------------------------------------------
    // standard utilities
    // -------------------------------------------------------------------------------------------------

    /**
     * Creates a string representation of the tuple in the form (f0, f1, f2, f3, f4, f5, f6, f7,
     * f8), where the individual fields are the value returned by calling {@link Object#toString} on
     * that field.
     *
     * @return The string representation of the tuple.
     */
    @Override
    public String toString() {
        return "("
                + StringUtils.arrayAwareToString(this.f0)
                + ","
                + StringUtils.arrayAwareToString(this.f1)
                + ","
                + StringUtils.arrayAwareToString(this.f2)
                + ","
                + StringUtils.arrayAwareToString(this.f3)
                + ","
                + StringUtils.arrayAwareToString(this.f4)
                + ","
                + StringUtils.arrayAwareToString(this.f5)
                + ","
                + StringUtils.arrayAwareToString(this.f6)
                + ","
                + StringUtils.arrayAwareToString(this.f7)
                + ","
                + StringUtils.arrayAwareToString(this.f8)
                + ")";
    }

    /**
     * Deep equality for tuples by calling equals() on the tuple members.
     *
     * @param o the object checked for equality
     * @return true if this is equal to o.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Tuple9)) {
            return false;
        }
        @SuppressWarnings("rawtypes")
        Tuple9 tuple = (Tuple9) o;
        if (f0 != null ? !f0.equals(tuple.f0) : tuple.f0 != null) {
            return false;
        }
        if (f1 != null ? !f1.equals(tuple.f1) : tuple.f1 != null) {
            return false;
        }
        if (f2 != null ? !f2.equals(tuple.f2) : tuple.f2 != null) {
            return false;
        }
        if (f3 != null ? !f3.equals(tuple.f3) : tuple.f3 != null) {
            return false;
        }
        if (f4 != null ? !f4.equals(tuple.f4) : tuple.f4 != null) {
            return false;
        }
        if (f5 != null ? !f5.equals(tuple.f5) : tuple.f5 != null) {
            return false;
        }
        if (f6 != null ? !f6.equals(tuple.f6) : tuple.f6 != null) {
            return false;
        }
        if (f7 != null ? !f7.equals(tuple.f7) : tuple.f7 != null) {
            return false;
        }
        if (f8 != null ? !f8.equals(tuple.f8) : tuple.f8 != null) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = f0 != null ? f0.hashCode() : 0;
        result = 31 * result + (f1 != null ? f1.hashCode() : 0);
        result = 31 * result + (f2 != null ? f2.hashCode() : 0);
        result = 31 * result + (f3 != null ? f3.hashCode() : 0);
        result = 31 * result + (f4 != null ? f4.hashCode() : 0);
        result = 31 * result + (f5 != null ? f5.hashCode() : 0);
        result = 31 * result + (f6 != null ? f6.hashCode() : 0);
        result = 31 * result + (f7 != null ? f7.hashCode() : 0);
        result = 31 * result + (f8 != null ? f8.hashCode() : 0);
        return result;
    }

    /**
     * Shallow tuple copy.
     *
     * @return A new Tuple with the same fields as this.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Tuple9<T0, T1, T2, T3, T4, T5, T6, T7, T8> copy() {
        return new Tuple9<>(
                this.f0, this.f1, this.f2, this.f3, this.f4, this.f5, this.f6, this.f7, this.f8);
    }

    /**
     * Creates a new tuple and assigns the given values to the tuple's fields. This is more
     * convenient than using the constructor, because the compiler can infer the generic type
     * arguments implicitly.
For example: {@code Tuple3.of(n, x, s)} instead of {@code new * Tuple3<Integer, Double, String>(n, x, s)} */ public static <T0, T1, T2, T3, T4, T5, T6, T7, T8> Tuple9<T0, T1, T2, T3, T4, T5, T6, T7, T8> of( T0 value0, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8) { return new Tuple9<>(value0, value1, value2, value3, value4, value5, value6, value7, value8); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.datastructures; import java.util.Collection; import java.util.Timer; import java.util.TimerTask; import java.util.UUID; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicLong; import org.apache.ignite.IgniteAtomicReference; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteAtomicStamped; import org.apache.ignite.IgniteCountDownLatch; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLock; import org.apache.ignite.IgniteQueue; import org.apache.ignite.IgniteSemaphore; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.configuration.AtomicConfiguration; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import 
org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.GridLeanSet;
import org.apache.ignite.internal.util.typedef.CA;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.testframework.GridTestUtils;

import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.testframework.GridTestUtils.waitForCondition;

/**
 * Failover tests for cache data structures.
 */
public abstract class GridCacheAbstractDataStructuresFailoverSelfTest extends IgniteCollectionAbstractTest {
    /** Overall test timeout in milliseconds (3 minutes). */
    private static final long TEST_TIMEOUT = 3 * 60 * 1000;

    /** Name of the extra node started/stopped during topology changes. */
    private static final String NEW_IGNITE_INSTANCE_NAME = "newGrid";

    /** Name under which the data structure being tested is created. */
    private static final String STRUCTURE_NAME = "structure";

    /** Name of the transactional cache used by the uncommitted-tx test. */
    private static final String TRANSACTIONAL_CACHE_NAME = "tx_cache";

    /** Number of topology changes performed by each topology-change thread. */
    private static final int TOP_CHANGE_CNT = 2;

    /** Number of concurrent topology-change threads. */
    private static final int TOP_CHANGE_THREAD_CNT = 2;

    /** When {@code true}, nodes configured via {@link #getConfiguration} start in client mode. */
    private boolean client;

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return TEST_TIMEOUT;
    }

    /**
     * @return Grids count to start.
     */
    public void testAtomicLongTopologyChange() throws Exception {
        try (IgniteAtomicLong atomic = grid(0).atomicLong(STRUCTURE_NAME, 10, true)) {
            // New node must observe the already-created value, not re-initialize it.
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            assertEquals(10, g.atomicLong(STRUCTURE_NAME, 10, false).get());

            assertEquals(20, g.atomicLong(STRUCTURE_NAME, 10, false).addAndGet(10));

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // Update made on the stopped node must survive its departure.
            assertEquals(20, grid(0).atomicLong(STRUCTURE_NAME, 10, true).get());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicLongConstantTopologyChange() throws Exception {
        doTestAtomicLong(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicLongConstantMultipleTopologyChange() throws Exception {
        doTestAtomicLong(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * Tests IgniteAtomicLong: increments the value while the given worker continuously
     * starts and stops nodes, asserting the counter is never lost or duplicated.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestAtomicLong(ConstantTopologyChangeWorker topWorker) throws Exception {
        try (IgniteAtomicLong s = grid(0).atomicLong(STRUCTURE_NAME, 1, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    // Every newly started node must see a positive (initialized) value.
                    assert ignite.atomicLong(STRUCTURE_NAME, 1, true).get() > 0;

                    return null;
                }
            });

            long val = s.get();

            while (!fut.isDone()) {
                // This thread is the only writer, so reads and increments must be exact.
                assertEquals(val, s.get());

                assertEquals(++val, s.incrementAndGet());
            }

            fut.get();

            // After all topology changes, every surviving node agrees on the final value.
            for (Ignite g : G.allGrids())
                assertEquals(val, g.atomicLong(STRUCTURE_NAME, 1, false).get());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicReferenceTopologyChange() throws Exception {
        try (IgniteAtomicReference atomic = grid(0).atomicReference(STRUCTURE_NAME, 10, true)) {
            // New node must observe the already-created reference value.
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            assertEquals((Integer)10, g.atomicReference(STRUCTURE_NAME, 10, false).get());

            g.atomicReference(STRUCTURE_NAME, 10, false).set(20);

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // Update made on the stopped node must survive its departure.
            assertEquals((Integer)20, grid(0).atomicReference(STRUCTURE_NAME, 10, true).get());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicReferenceConstantTopologyChange() throws Exception {
        doTestAtomicReference(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicReferenceConstantMultipleTopologyChange() throws Exception {
        doTestAtomicReference(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * Tests atomic reference: keeps setting increasing values while the given worker
     * continuously starts and stops nodes, asserting no update is lost.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestAtomicReference(ConstantTopologyChangeWorker topWorker) throws Exception {
        try (IgniteAtomicReference<Integer> s = grid(0).atomicReference(STRUCTURE_NAME, 1, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    // Every newly started node must see a positive (initialized) value.
                    assert ignite.atomicReference(STRUCTURE_NAME, 1, false).get() > 0;

                    return null;
                }
            });

            int val = s.get();

            while (!fut.isDone()) {
                // This thread is the only writer, so the last written value must be read back.
                assertEquals(val, (int)s.get());

                s.set(++val);
            }

            fut.get();

            // After all topology changes, every surviving node agrees on the final value.
            for (Ignite g : G.allGrids())
                assertEquals(val, (int)g.atomicReference(STRUCTURE_NAME, 1, true).get());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicStampedTopologyChange() throws Exception {
        try (IgniteAtomicStamped atomic = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, true)) {
            // New node must observe the already-created value/stamp pair.
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 10, 10, false).get();

            assertEquals((Integer)10, t.get1());
            assertEquals((Integer)10, t.get2());

            g.atomicStamped(STRUCTURE_NAME, 10, 10, false).set(20, 20);

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // Update made on the stopped node must survive its departure.
            t = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, false).get();

            assertEquals((Integer)20, t.get1());
            assertEquals((Integer)20, t.get2());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicStampedConstantTopologyChange() throws Exception {
        doTestAtomicStamped(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicStampedConstantMultipleTopologyChange() throws Exception {
        doTestAtomicStamped(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * Tests atomic stamped value.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestAtomicStamped(ConstantTopologyChangeWorker topWorker) throws Exception {
        try (IgniteAtomicStamped<Integer, Integer> s = grid(0).atomicStamped(STRUCTURE_NAME, 1, 1, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    // Every newly started node must see positive (initialized) value and stamp.
                    IgniteBiTuple<Integer, Integer> t = ignite.atomicStamped(STRUCTURE_NAME, 1, 1, false).get();

                    assert t.get1() > 0;
                    assert t.get2() > 0;

                    return null;
                }
            });

            int val = s.value();

            while (!fut.isDone()) {
                // This thread is the only writer; value and stamp are kept equal on purpose.
                IgniteBiTuple<Integer, Integer> t = s.get();

                assertEquals(val, (int)t.get1());
                assertEquals(val, (int)t.get2());

                ++val;

                s.set(val, val);
            }

            fut.get();

            // After all topology changes, every surviving node agrees on the final pair.
            for (Ignite g : G.allGrids()) {
                IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 1, 1, false).get();

                assertEquals(val, (int)t.get1());
                assertEquals(val, (int)t.get2());
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testCountDownLatchTopologyChange() throws Exception {
        try (IgniteCountDownLatch latch = grid(0).countDownLatch(STRUCTURE_NAME, 20, true, true)) {
            try {
                // New node must observe the already-created latch count.
                Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

                assertEquals(20, g.countDownLatch(STRUCTURE_NAME, 20, true, false).count());

                g.countDownLatch(STRUCTURE_NAME, 20, true, false).countDown(10);

                stopGrid(NEW_IGNITE_INSTANCE_NAME);

                // Count-down performed on the stopped node must survive its departure.
                assertEquals(10, grid(0).countDownLatch(STRUCTURE_NAME, 20, true, false).count());
            }
            finally {
                // Release any waiters so the latch can be closed cleanly.
                grid(0).countDownLatch(STRUCTURE_NAME, 20, true, false).countDownAll();
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreFailoverSafe() throws Exception {
        try (final IgniteSemaphore semaphore = grid(0).semaphore(STRUCTURE_NAME, 20, true, true)) {
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            IgniteSemaphore semaphore2 = g.semaphore(STRUCTURE_NAME, 20, true, false);

            assertEquals(20, semaphore2.availablePermits());

            semaphore2.acquire(10);

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // In failover-safe mode the permits held by the dead node must be returned.
            waitForCondition(new PA() {
                @Override public boolean apply() {
                    return semaphore.availablePermits() == 20;
                }
            }, 2000);
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreNonFailoverSafe() throws Exception {
        try (IgniteSemaphore sem = grid(0).semaphore(STRUCTURE_NAME, 20, false, true)) {
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            IgniteSemaphore sem2 = g.semaphore(STRUCTURE_NAME, 20, false, false);

            // Drain all permits on the node that is about to die.
            sem2.acquire(20);

            assertEquals(0, sem.availablePermits());

            new Timer().schedule(new TimerTask() {
                @Override public void run() {
                    stopGrid(NEW_IGNITE_INSTANCE_NAME);
                }
            }, 2000);

            try {
                // Must be interrupted (not hang) when the permit owner dies in
                // non-failover-safe mode.
                sem.acquire(1);
            }
            catch (IgniteInterruptedException ignored) {
                // Expected exception.
                return;
            }
        }

        fail("Thread hasn't been interrupted");
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreSingleNodeFailure() throws Exception {
        final Ignite i1 = grid(0);

        IgniteSemaphore sem1 = i1.semaphore(STRUCTURE_NAME, 1, false, true);

        sem1.acquire();

        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Void>() {
            @Override public Void call() throws Exception {
                boolean failed = true;

                IgniteSemaphore sem2 = i1.semaphore(STRUCTURE_NAME, 1, false, true);

                try {
                    // Blocks until the owning node is closed below; must fail then.
                    sem2.acquire();
                }
                catch (Exception ignored) {
                    failed = false;
                }
                finally {
                    assertFalse(failed);

                    sem2.release();
                }

                return null;
            }
        });

        // Busy-wait until the async thread is actually queued on the semaphore.
        while (!sem1.hasQueuedThreads()) {
            try {
                Thread.sleep(1);
            }
            catch (InterruptedException ignored) {
                fail();
            }
        }

        i1.close();

        fut.get();
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreConstantTopologyChangeFailoverSafe() throws Exception {
        doTestSemaphore(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreConstantTopologyChangeNonFailoverSafe() throws Exception {
        doTestSemaphore(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreMultipleTopologyChangeFailoverSafe() throws Exception {
        doTestSemaphore(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testSemaphoreMultipleTopologyChangeNonFailoverSafe() throws Exception {
        doTestSemaphore(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false);
    }

    /**
     * Tests semaphore acquire/release while the given worker continuously starts and stops
     * nodes. Permit count is sized so every topology-change thread can hold a permit.
     *
     * @param topWorker Topology change worker.
     * @param failoverSafe Whether the semaphore is created in failover-safe mode.
     * @throws Exception If failed.
     */
    private void doTestSemaphore(ConstantTopologyChangeWorker topWorker, final boolean failoverSafe)
        throws Exception {
        // Multiple-change workers start 3 nodes per iteration, hence the larger permit pool.
        final int permits = topWorker instanceof MultipleTopologyChangeWorker ||
            topWorker instanceof PartitionedMultipleTopologyChangeWorker ? TOP_CHANGE_THREAD_CNT * 3 :
            TOP_CHANGE_CNT;

        try (IgniteSemaphore s = grid(0).semaphore(STRUCTURE_NAME, permits, failoverSafe, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    IgniteSemaphore sem = ignite.semaphore(STRUCTURE_NAME, permits, failoverSafe, false);

                    while (true) {
                        try {
                            sem.acquire(1);

                            break;
                        }
                        catch (IgniteInterruptedException e) {
                            // Exception may happen in non failover safe mode.
                            if (failoverSafe)
                                throw e;
                            else {
                                // In non-failoverSafe mode semaphore is not safe to be reused,
                                // and should always be discarded after exception is caught.
                                break;
                            }
                        }
                    }

                    return null;
                }
            });

            while (!fut.isDone()) {
                while (true) {
                    try {
                        s.acquire(1);

                        break;
                    }
                    catch (IgniteInterruptedException e) {
                        // Exception may happen in non failover safe mode.
                        if (failoverSafe)
                            throw e;
                        else {
                            // In non-failoverSafe mode semaphore is not safe to be reused,
                            // and should always be discarded after exception is caught.
                            break;
                        }
                    }
                }

                assert s.availablePermits() < permits;

                s.release();

                assert s.availablePermits() <= permits;
            }

            fut.get();

            // Semaphore is left in proper state only if failoverSafe mode is used.
            if (failoverSafe) {
                for (Ignite g : G.allGrids())
                    assertEquals(permits, g.semaphore(STRUCTURE_NAME, permits, false, false).availablePermits());
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testReentrantLockFailsWhenServersLeft() throws Exception {
        testReentrantLockFailsWhenServersLeft(false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testFairReentrantLockFailsWhenServersLeft() throws Exception {
        testReentrantLockFailsWhenServersLeft(true);
    }

    /**
     * Checks that a lock held on a client fails with an exception (rather than hanging)
     * once all server nodes have left the cluster.
     *
     * @param fair Whether to use the fair lock implementation.
     * @throws Exception If failed.
     */
    public void testReentrantLockFailsWhenServersLeft(final boolean fair) throws Exception {
        client = true;

        Ignite client = startGrid(gridCount());

        Ignite server = grid(0);

        // Initialize lock.
        IgniteLock srvLock = server.reentrantLock("lock", true, fair, true);

        // Used to signal the test thread once the client has exercised the lock.
        IgniteSemaphore semaphore = server.semaphore("sync", 0, true, true);

        IgniteFuture fut = client.compute().applyAsync(new IgniteClosure<Ignite, Object>() {
            @Override public Object apply(Ignite ignite) {
                final IgniteLock l = ignite.reentrantLock("lock", true, fair, true);

                l.lock();

                assertTrue(l.isHeldByCurrentThread());

                l.unlock();

                assertFalse(l.isHeldByCurrentThread());

                // Signal the server to go down.
                ignite.semaphore("sync", 0, true, true).release();

                boolean isExceptionThrown = false;

                try {
                    // Wait for the server to go down.
                    Thread.sleep(1000);

                    l.lock();

                    fail("Exception must be thrown.");
                }
                catch (InterruptedException ignored) {
                    fail("Interrupted exception not expected here.");
                }
                catch (IgniteException ignored) {
                    isExceptionThrown = true;
                }
                finally {
                    assertTrue(isExceptionThrown);

                    assertFalse(l.isHeldByCurrentThread());
                }

                return null;
            }
        }, client);

        // Wait for the lock on client to be acquired then released.
        semaphore.acquire();

        for (int i = 0; i < gridCount(); i++)
            stopGrid(i);

        fut.get();

        client.close();
    }

    /**
     * @throws Exception If failed.
     */
    public void testReentrantLockConstantTopologyChangeFailoverSafe() throws Exception {
        doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testReentrantLockConstantMultipleTopologyChangeFailoverSafe() throws Exception {
        doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testReentrantLockConstantTopologyChangeNonFailoverSafe() throws Exception {
        doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testReentrantLockConstantMultipleTopologyChangeNonFailoverSafe() throws Exception {
        doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, false);
    }

    /**
     * @throws Exception If failed.
     */
    public void testFairReentrantLockConstantTopologyChangeFailoverSafe() throws Exception {
        doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testFairReentrantLockConstantMultipleTopologyChangeFailoverSafe() throws Exception {
        doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), true, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testFairReentrantLockConstantTopologyChangeNonFailoverSafe() throws Exception {
        doTestReentrantLock(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, true);
    }

    /**
     * @throws Exception If failed.
     */
    public void testFairReentrantLockConstantMultipleTopologyChangeNonFailoverSafe() throws Exception {
        doTestReentrantLock(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT), false, true);
    }

    /**
     * Tests the reentrant lock: repeatedly locks/unlocks while the given worker changes
     * topology; every started node queues an async locker to contend for the same lock.
     *
     * @param topWorker Topology change worker.
     * @param failoverSafe Whether the lock is created in failover-safe mode.
     * @param fair Whether to use the fair lock implementation.
     * @throws Exception If failed.
     */
    private void doTestReentrantLock(
        final ConstantTopologyChangeWorker topWorker,
        final boolean failoverSafe,
        final boolean fair
    ) throws Exception {
        IgniteEx ig = grid(0);

        try (IgniteLock lock = ig.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Void>() {
                @Override public Void apply(Ignite ignite) {
                    final IgniteLock l = ignite.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, false);

                    final AtomicBoolean done = new AtomicBoolean(false);

                    // Async contender; it may or may not obtain the lock before its node stops.
                    GridTestUtils.runAsync(new Callable<Void>() {
                        @Override public Void call() throws Exception {
                            try {
                                l.lock();
                            }
                            finally {
                                done.set(true);
                            }

                            return null;
                        }
                    });

                    // Wait until l.lock() has been called.
                    while (!l.hasQueuedThreads() && !done.get()) {
                        // No-op.
                    }

                    return null;
                }
            });

            while (!fut.isDone()) {
                try {
                    lock.lock();
                }
                catch (IgniteException e) {
                    // Exception may happen in non-failoversafe mode.
                    if (failoverSafe)
                        throw e;
                }
                finally {
                    // Broken lock cannot be used in non-failoversafe mode.
                    if (!lock.isBroken() || failoverSafe) {
                        assertTrue(lock.isHeldByCurrentThread());

                        lock.unlock();

                        assertFalse(lock.isHeldByCurrentThread());
                    }
                }
            }

            fut.get();

            for (Ignite g : G.allGrids()) {
                IgniteLock l = g.reentrantLock(STRUCTURE_NAME, failoverSafe, fair, false);

                assertTrue(g.name(), !l.isHeldByCurrentThread() || lock.isBroken());
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testCountDownLatchConstantTopologyChange() throws Exception {
        doTestCountDownLatch(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * @throws Exception If failed.
     */
    public void testCountDownLatchConstantMultipleTopologyChange() throws Exception {
        doTestCountDownLatch(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * Tests distributed count down latch.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestCountDownLatch(ConstantTopologyChangeWorker topWorker) throws Exception {
        try (IgniteCountDownLatch s = grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true)) {
            try {
                IgniteInternalFuture<?> fut = topWorker.startChangingTopology(
                    new IgniteClosure<Ignite, Object>() {
                        @Override public Object apply(Ignite ignite) {
                            // Every newly started node must see a not-yet-exhausted latch.
                            assert ignite.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).count() > 0;

                            return null;
                        }
                    });

                int val = s.count();

                while (!fut.isDone()) {
                    // This thread is the only one counting down, so counts must be exact.
                    assertEquals(val, s.count());

                    assertEquals(--val, s.countDown());
                }

                fut.get();

                // After all topology changes, every surviving node agrees on the count.
                for (Ignite g : G.allGrids())
                    assertEquals(val, g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true).count());
            }
            finally {
                // Release any waiters so the latch can be closed cleanly.
                grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).countDownAll();
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testFifoQueueTopologyChange() throws Exception {
        try {
            grid(0).queue(STRUCTURE_NAME, 0, config(false)).put(10);

            // New node must observe the element enqueued before it joined.
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            assertEquals(10, (int)g.<Integer>queue(STRUCTURE_NAME, 0, null).poll());

            g.queue(STRUCTURE_NAME, 0, null).put(20);

            stopGrid(NEW_IGNITE_INSTANCE_NAME);

            // Element enqueued on the stopped node must survive its departure.
            assertEquals(20, (int)grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).peek());
        }
        finally {
            grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).close();
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testQueueTopologyChange() throws Exception {
        ConstantTopologyChangeWorker topWorker = new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT);

        try (final IgniteQueue<Integer> q = grid(0).queue(STRUCTURE_NAME, 0, config(false))) {
            // Pre-fill so concurrent take/poll have something to consume.
            for (int i = 0; i < 1000; i++)
                q.add(i);

            final IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    return null;
                }
            });

            // Three concurrent users of the queue while topology keeps changing.
            IgniteInternalFuture<?> takeFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.take();

                    return null;
                }
            });

            IgniteInternalFuture<?> pollFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.poll();

                    return null;
                }
            });

            IgniteInternalFuture<?> addFut = GridTestUtils.runAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    while (!fut.isDone())
                        q.add(0);

                    return null;
                }
            });

            fut.get();

            pollFut.get();
            addFut.get();

            // Unblock a possibly waiting take() before joining that thread.
            q.add(0);

            takeFut.get();
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testQueueConstantTopologyChange() throws Exception {
        // PARTITIONED collections use a single topology-change thread here.
        int topChangeThreads = collectionCacheMode() == CacheMode.PARTITIONED ? 1 : TOP_CHANGE_THREAD_CNT;

        doTestQueue(new ConstantTopologyChangeWorker(topChangeThreads));
    }

    /**
     * @throws Exception If failed.
     */
    public void testQueueConstantMultipleTopologyChange() throws Exception {
        // PARTITIONED collections use a single topology-change thread here.
        int topChangeThreads = collectionCacheMode() == CacheMode.PARTITIONED ? 1 : TOP_CHANGE_THREAD_CNT;

        doTestQueue(multipleTopologyChangeWorker(topChangeThreads));
    }

    /**
     * Tests the queue.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestQueue(ConstantTopologyChangeWorker topWorker) throws Exception {
        int queueMaxSize = 100;

        try (IgniteQueue<Integer> s = grid(0).queue(STRUCTURE_NAME, 0, config(false))) {
            s.put(1);

            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    // Every newly started node must see the queue and its positive head element.
                    IgniteQueue<Integer> queue = ignite.queue(STRUCTURE_NAME, 0, null);

                    assertNotNull(queue);

                    Integer val = queue.peek();

                    assertNotNull(val);

                    assert val > 0;

                    return null;
                }
            });

            int val = s.peek();

            while (!fut.isDone()) {
                if (s.size() == queueMaxSize) {
                    // Drain all but one element, asserting FIFO order was preserved
                    // (each polled value must be exactly one greater than the previous).
                    int last = 0;

                    for (int i = 0, size = s.size() - 1; i < size; i++) {
                        int cur = s.poll();

                        if (i == 0) {
                            last = cur;

                            continue;
                        }

                        assertEquals(last, cur - 1);

                        last = cur;
                    }
                }

                s.put(++val);
            }

            fut.get();

            val = s.peek();

            // After all topology changes, every surviving node sees the same head element.
            for (Ignite g : G.allGrids())
                assertEquals(val, (int)g.<Integer>queue(STRUCTURE_NAME, 0, null).peek());
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicSequenceInitialization() throws Exception {
        int threadCnt = 3;

        final AtomicInteger idx = new AtomicInteger(gridCount());

        // Each thread repeatedly starts and stops its own extra node.
        IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
            @Override public void apply() {
                int id = idx.getAndIncrement();

                try {
                    log.info("Start node: " + id);

                    startGrid(id);

                    Thread.sleep(1000);
                }
                catch (Exception e) {
                    throw F.wrap(e);
                }
                finally {
                    stopGrid(id);

                    info("Thread finished.");
                }
            }
        }, threadCnt, "test-thread");

        while (!fut.isDone()) {
            // Sequence must stay usable from compute jobs while nodes churn.
            grid(0).compute().call(new IgniteCallable<Object>() {
                /** Injected Ignite instance of the node executing the job. */
                @IgniteInstanceResource
                private Ignite g;

                @Override public Object call() throws Exception {
                    IgniteAtomicSequence seq = g.atomicSequence(STRUCTURE_NAME, 1, true);

                    assert seq != null;

                    for (int i = 0; i < 1000; i++)
                        seq.getAndIncrement();

                    return null;
                }
            });
        }

        fut.get();
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicSequenceTopologyChange() throws Exception {
        try (IgniteAtomicSequence s = grid(0).atomicSequence(STRUCTURE_NAME, 10, true)) {
            Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

            // NOTE(review): the expected values 1010/1020 rely on the new node reserving
            // its own batch of sequence values (default reserve size 1000) — confirm
            // against AtomicConfiguration defaults if this assertion starts failing.
            assertEquals(1010, g.atomicSequence(STRUCTURE_NAME, 10, false).get());

            assertEquals(1020, g.atomicSequence(STRUCTURE_NAME, 10, false).addAndGet(10));

            stopGrid(NEW_IGNITE_INSTANCE_NAME);
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicSequenceConstantTopologyChange() throws Exception {
        doTestAtomicSequence(new ConstantTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicSequenceConstantMultipleTopologyChange() throws Exception {
        doTestAtomicSequence(multipleTopologyChangeWorker(TOP_CHANGE_THREAD_CNT));
    }

    /**
     * Tests atomic sequence: increments while the given worker continuously starts and
     * stops nodes, asserting the sequence is strictly increasing and never regresses.
     *
     * @param topWorker Topology change worker.
     * @throws Exception If failed.
     */
    private void doTestAtomicSequence(ConstantTopologyChangeWorker topWorker) throws Exception {
        try (IgniteAtomicSequence s = grid(0).atomicSequence(STRUCTURE_NAME, 1, true)) {
            IgniteInternalFuture<?> fut = topWorker.startChangingTopology(new IgniteClosure<Ignite, Object>() {
                @Override public Object apply(Ignite ignite) {
                    // Every newly started node must see a positive (initialized) value.
                    assertTrue(ignite.atomicSequence(STRUCTURE_NAME, 1, false).get() > 0);

                    return null;
                }
            });

            long old = s.get();

            while (!fut.isDone()) {
                // This thread is the only incrementer, so reads are stable between increments.
                assertEquals(old, s.get());

                long val = s.incrementAndGet();

                assertTrue(val > old);

                old = val;
            }

            fut.get();
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testUncommitedTxLeave() throws Exception {
        final int val = 10;

        grid(0).atomicLong(STRUCTURE_NAME, val, true);

        GridTestUtils.runAsync(new Callable<Object>() {
            @Override public Object call() throws Exception {
                Ignite g = startGrid(NEW_IGNITE_INSTANCE_NAME);

                try {
                    // Transaction is deliberately left uncommitted: the node is stopped
                    // while it is open, and the atomic update must still survive.
                    g.transactions().txStart();

                    g.cache(TRANSACTIONAL_CACHE_NAME).put(1, 1);

                    assertEquals(val + 1, g.atomicLong(STRUCTURE_NAME, val, false).incrementAndGet());
                }
                finally {
                    stopGrid(NEW_IGNITE_INSTANCE_NAME);
                }

                return null;
            }
        }).get();

        waitForDiscovery(G.allGrids().toArray(new Ignite[gridCount()]));

        // The increment performed inside the abandoned tx scope must be visible.
        assertEquals(val + 1, grid(0).atomicLong(STRUCTURE_NAME, val, false).get());
    }

    /**
     * @param topChangeThreads Number of topology change threads.
     *
     * @return Specific multiple topology change worker implementation.
     */
    private ConstantTopologyChangeWorker multipleTopologyChangeWorker(int topChangeThreads) {
        return collectionCacheMode() == CacheMode.PARTITIONED ?
            new PartitionedMultipleTopologyChangeWorker(topChangeThreads) :
            new MultipleTopologyChangeWorker(topChangeThreads);
    }

    /**
     * Worker that repeatedly starts and stops one node at a time in each of several
     * threads, invoking a callback against every freshly started node.
     */
    private class ConstantTopologyChangeWorker {
        /** Set to {@code true} by the first thread that fails; stops all other threads. */
        protected final AtomicBoolean failed = new AtomicBoolean(false);

        /** Number of concurrent topology-change threads. */
        private final int topChangeThreads;

        /**
         * @param topChangeThreads Number of topology change threads.
         */
        public ConstantTopologyChangeWorker(int topChangeThreads) {
            this.topChangeThreads = topChangeThreads;
        }

        /**
         * Starts changing cluster's topology.
         *
         * @param cb Callback to run after node start.
         * @return Future.
*/
        IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) {
            // Hand out distinct node indexes across all worker threads.
            final AtomicInteger nodeIdx = new AtomicInteger(G.allGrids().size());

            return GridTestUtils.runMultiThreadedAsync(new CA() {
                @Override public void apply() {
                    try {
                        for (int i = 0; i < TOP_CHANGE_CNT; i++) {
                            if (failed.get())
                                return;

                            int idx = nodeIdx.getAndIncrement();

                            Thread.currentThread().setName("thread-" + getTestIgniteInstanceName(idx));

                            try {
                                log.info("Start node: " + getTestIgniteInstanceName(idx));

                                Ignite g = startGrid(idx);

                                cb.apply(g);
                            }
                            finally {
                                // Always stop the node we started, even if the callback threw.
                                stopGrid(idx);
                            }
                        }
                    }
                    catch (Exception e) {
                        // First failure wins and is propagated; later failures just exit.
                        if (failed.compareAndSet(false, true))
                            throw F.wrap(e);
                    }
                }
            }, topChangeThreads, "topology-change-thread");
        }
    }

    /**
     * Worker that starts three nodes per iteration in each thread before stopping
     * them all again.
     */
    private class MultipleTopologyChangeWorker extends ConstantTopologyChangeWorker {
        /**
         * @param topChangeThreads Number of topology change threads.
         */
        public MultipleTopologyChangeWorker(int topChangeThreads) {
            super(topChangeThreads);
        }

        /**
         * Starts changing cluster's topology.
         *
         * @return Future.
         */
        @Override IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) {
            return GridTestUtils.runMultiThreadedAsync(new CA() {
                @Override public void apply() {
                    try {
                        for (int i = 0; i < TOP_CHANGE_CNT; i++) {
                            if (failed.get())
                                return;

                            Collection<String> names = new GridLeanSet<>(3);

                            try {
                                for (int j = 0; j < 3; j++) {
                                    if (failed.get())
                                        return;

                                    String name = UUID.randomUUID().toString();

                                    log.info("Start node: " + name);

                                    Ignite g = startGrid(name);

                                    names.add(name);

                                    cb.apply(g);
                                }
                            }
                            finally {
                                // Stop every node started in this iteration, even on failure.
                                for (String name : names)
                                    stopGrid(name);
                            }
                        }
                    }
                    catch (Exception e) {
                        if (failed.compareAndSet(false, true))
                            throw F.wrap(e);
                    }
                }
            }, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
        }
    }

    /**
     * Worker variant for PARTITIONED caches: all threads rendezvous on a barrier,
     * and nodes are stopped one at a time with partition map exchange awaited
     * after each stop.
     */
    private class PartitionedMultipleTopologyChangeWorker extends ConstantTopologyChangeWorker {
        /** Synchronizes change threads; its barrier action performs the node stops. */
        private CyclicBarrier barrier;

        /**
         * @param topChangeThreads Number of topology change threads.
*/
        public PartitionedMultipleTopologyChangeWorker(int topChangeThreads) {
            super(topChangeThreads);
        }

        /**
         * Starts changing cluster's topology.
         *
         * @return Future.
         */
        @Override IgniteInternalFuture<?> startChangingTopology(final IgniteClosure<Ignite, ?> cb) {
            // Each thread acquires one permit per iteration; the barrier action
            // releases all permits once the stop phase completes.
            final Semaphore sem = new Semaphore(TOP_CHANGE_THREAD_CNT);

            final ConcurrentSkipListSet<String> startedNodes = new ConcurrentSkipListSet<>();

            // Barrier action runs after every thread has started its 3 nodes:
            // stop each started node, awaiting partition map exchange between stops,
            // then release the threads for the next iteration.
            barrier = new CyclicBarrier(TOP_CHANGE_THREAD_CNT, new Runnable() {
                @Override public void run() {
                    try {
                        assertEquals(TOP_CHANGE_THREAD_CNT * 3, startedNodes.size());

                        for (String name : startedNodes) {
                            stopGrid(name, false);

                            awaitPartitionMapExchange();
                        }

                        startedNodes.clear();

                        sem.release(TOP_CHANGE_THREAD_CNT);

                        barrier.reset();
                    }
                    catch (Exception e) {
                        if (failed.compareAndSet(false, true)) {
                            // Unblock any threads waiting on the semaphore/barrier
                            // so the failure can propagate instead of deadlocking.
                            sem.release(TOP_CHANGE_THREAD_CNT);

                            barrier.reset();

                            throw F.wrap(e);
                        }
                    }
                }
            });

            IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
                @Override public void apply() {
                    try {
                        for (int i = 0; i < TOP_CHANGE_CNT; i++) {
                            sem.acquire();

                            if (failed.get())
                                return;

                            for (int j = 0; j < 3; j++) {
                                if (failed.get())
                                    return;

                                String name = UUID.randomUUID().toString();

                                startedNodes.add(name);

                                log.info("Start node: " + name);

                                Ignite g = startGrid(name);

                                cb.apply(g);
                            }

                            try {
                                barrier.await();
                            }
                            catch (BrokenBarrierException ignored) {
                                // No-op: barrier was reset by a failing thread; loop
                                // re-checks the failed flag on the next iteration.
                            }
                        }
                    }
                    catch (Exception e) {
                        if (failed.compareAndSet(false, true)) {
                            sem.release(TOP_CHANGE_THREAD_CNT);

                            barrier.reset();

                            throw F.wrap(e);
                        }
                    }
                }
            }, TOP_CHANGE_THREAD_CNT, "topology-change-thread");

            return fut;
        }
    }
}
/* * Copyright 2006-2021 Prowide * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.prowidesoftware.swift.model.field; import com.prowidesoftware.swift.model.Tag; import com.prowidesoftware.Generated; import com.prowidesoftware.deprecation.ProwideDeprecated; import com.prowidesoftware.deprecation.TargetYear; import java.io.Serializable; import java.util.Locale; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Calendar; import com.prowidesoftware.swift.model.field.AmountContainer; import com.prowidesoftware.swift.model.field.AmountResolver; import com.prowidesoftware.swift.model.field.DateContainer; import com.prowidesoftware.swift.model.field.DateResolver; import org.apache.commons.lang3.StringUtils; import com.prowidesoftware.swift.model.field.SwiftParseUtils; import com.prowidesoftware.swift.model.field.Field; import com.prowidesoftware.swift.model.*; import com.prowidesoftware.swift.utils.SwiftFormatUtils; import com.google.gson.JsonObject; import com.google.gson.JsonParser; /** * SWIFT MT Field 37A. * <p> * Model and parser for field 37A of a SWIFT MT message. 
 *
 * <p>Subfields (components) Data types
 * <ol>
 *   <li><code>BigDecimal</code></li>
 *   <li><code>Calendar</code></li>
 *   <li><code>String</code></li>
 *   <li><code>Long</code></li>
 *   <li><code>String</code></li>
 * </ol>
 *
 * <p>Structure definition
 * <ul>
 *   <li>validation pattern: <code>&lt;AMOUNT&gt;12[//&lt;DATE2&gt;&lt;DM&gt;3n][/16x]</code></li>
 *   <li>parser pattern: <code>N[//&lt;DATE2&gt;cS][/S]</code></li>
 *   <li>components pattern: <code>NESNS</code></li>
 * </ul>
 *
 * <p>
 * This class complies with standard release <strong>SRU2021</strong>
 */
@SuppressWarnings("unused")
@Generated
public class Field37A extends Field implements Serializable, DateContainer, AmountContainer {
    /**
     * Constant identifying the SRU to which this class belongs to.
     */
    public static final int SRU = 2021;

    private static final long serialVersionUID = 1L;

    /**
     * Constant with the field name 37A.
     */
    public static final String NAME = "37A";

    /**
     * Same as NAME, intended to be clear when using static imports.
     */
    public static final String F_37A = "37A";

    /**
     * @deprecated use {@link #parserPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String PARSER_PATTERN = "N[//<DATE2>cS][/S]";

    /**
     * @deprecated use {@link #typesPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String COMPONENTS_PATTERN = "NESNS";

    /**
     * @deprecated use {@link #typesPattern()} method instead.
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final String TYPES_PATTERN = "IESNS";

    /**
     * Component number for the Rate subfield.
     */
    public static final Integer RATE = 1;

    /**
     * Component number for the End Date subfield.
     */
    public static final Integer END_DATE = 2;

    /**
     * Alternative (<em>DEPRECATED</em>) constant name for field's End Date Component number.
     * @see #END_DATE
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final Integer DATE = 2;

    /**
     * Component number for the Period subfield.
     */
    public static final Integer PERIOD = 3;

    /**
     * Alternative (<em>DEPRECATED</em>) constant name for field's Period Component number.
     * @see #PERIOD
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final Integer DM_MARK = 3;

    /**
     * Component number for the Number subfield.
     */
    public static final Integer NUMBER = 4;

    /**
     * Alternative (<em>DEPRECATED</em>) constant name for field's Number Component number.
     * @see #NUMBER
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public static final Integer NUMBER_OF_DAYSMONTHS = 4;

    /**
     * Component number for the Information subfield.
     */
    public static final Integer INFORMATION = 5;

    /**
     * Default constructor. Creates a new field setting all components to null.
     */
    public Field37A() {
        super(5);
    }

    /**
     * Creates a new field and initializes its components with content from the parameter value.
     * @param value complete field value including separators and CRLF
     */
    public Field37A(final String value) {
        super(value);
    }

    /**
     * Creates a new field and initializes its components with content from the parameter tag.
     * The value is parsed with {@link #parse(String)}
     * @throws IllegalArgumentException if the parameter tag is null or its tagname does not match the field name
     * @since 7.8
     */
    public Field37A(final Tag tag) {
        this();
        if (tag == null) {
            throw new IllegalArgumentException("tag cannot be null.");
        }
        if (!StringUtils.equals(tag.getName(), "37A")) {
            throw new IllegalArgumentException("cannot create field 37A from tag "+tag.getName()+", tagname must match the name of the field.");
        }
        parse(tag.getValue());
    }

    /**
     * Copy constructor.
     * Initializes the components list with a deep copy of the source components list.
     * @param source a field instance to copy
     * @since 7.7
     */
    public static Field37A newInstance(Field37A source) {
        Field37A cp = new Field37A();
        cp.setComponents(new ArrayList<>(source.getComponents()));
        return cp;
    }

    /**
     * Create a Tag with this field name and the given value.
     * Shorthand for <code>new Tag(NAME, value)</code>
     * @see #NAME
     * @since 7.5
     */
    public static Tag tag(final String value) {
        return new Tag(NAME, value);
    }

    /**
     * Create a Tag with this field name and an empty string as value.
     * Shorthand for <code>new Tag(NAME, "")</code>
     * @see #NAME
     * @since 7.5
     */
    public static Tag emptyTag() {
        return new Tag(NAME, "");
    }

    /**
     * Parses the parameter value into the internal components structure.
     *
     * <p>Used to update all components from a full new value, as an alternative
     * to setting individual components. Previous component values are overwritten.
     *
     * @param value complete field value including separators and CRLF
     * @since 7.8
     */
    @Override
    public void parse(final String value) {
        init(5);
        // Component 1 (rate) is everything before the optional "//" separator.
        setComponent1(SwiftParseUtils.getTokenFirst(value, "//"));
        String toparse = SwiftParseUtils.getTokenSecondLast(value, "//");
        if (toparse != null) {
            // Fixed layout after "//": 6 chars for the DATE2 date, 1 char for the
            // D/M mark, then "number[/information]".
            if (toparse.length() >= 6) {
                setComponent2(StringUtils.substring(toparse, 0, 6));
            }
            if (toparse.length() >= 7) {
                setComponent3(StringUtils.substring(toparse, 6, 7));
            }
            if (toparse.length() > 7) {
                String toparse2 = StringUtils.substring(toparse, 7);
                setComponent4(SwiftParseUtils.getTokenFirst(toparse2, "/"));
                setComponent5(SwiftParseUtils.getTokenSecondLast(toparse2, "/"));
            }
        }
    }

    /**
     * Serializes the fields' components into the single string value (SWIFT format)
     */
    @Override
    public String getValue() {
        final StringBuilder result = new StringBuilder();
        append(result, 1);
        // The "//" separator is only emitted when any of the optional date/mark/number
        // components is present.
        if (getComponent2() != null || getComponent3() != null || getComponent4() != null) {
            result.append("//");
            append(result, 2);
            append(result, 3);
            append(result, 4);
        }
        if (getComponent5() != null) {
            result.append("/").append(getComponent5());
        }
        return result.toString();
    }
    /**
     * Returns a localized string of a field component, suitable for showing to humans.<br>
     *
     * @param component number of the component to display
     * @param locale optional locale to format date and amounts, if null, the default locale is used
     * @return formatted component value or null if component number is invalid or not present
     * @throws IllegalArgumentException if component number is invalid for the field
     * @since 7.8
     */
    @Override
    public String getValueDisplay(int component, Locale locale) {
        if (component < 1 || component > 5) {
            throw new IllegalArgumentException("invalid component number " + component + " for field 37A");
        }
        if (component == 1) {
            //amount, rate
            java.text.NumberFormat f = java.text.NumberFormat.getNumberInstance(notNull(locale));
            f.setMaximumFractionDigits(13);
            BigDecimal n = getComponent1AsBigDecimal();
            if (n != null) {
                return f.format(n);
            }
        }
        if (component == 2) {
            //date: [YY]YYMMDD
            java.text.DateFormat f = java.text.DateFormat.getDateInstance(java.text.DateFormat.DEFAULT, notNull(locale));
            java.util.Calendar cal = getComponent2AsCalendar();
            if (cal != null) {
                return f.format(cal.getTime());
            }
        }
        if (component == 3) {
            //default format (as is)
            return getComponent(3);
        }
        if (component == 4) {
            //default format (as is)
            return getComponent(4);
        }
        if (component == 5) {
            //default format (as is)
            return getComponent(5);
        }
        return null;
    }

    /**
     * @deprecated use {@link #typesPattern()} instead.
     */
    @Override
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public String componentsPattern() {
        return "NESNS";
    }

    /**
     * Returns the field component types pattern.
     *
     * This method returns a letter representing the type for each component in the Field. It supersedes
     * the Components Pattern because it distinguishes between N (Number) and I (BigDecimal).
     * @since 9.2.7
     */
    @Override
    public String typesPattern() {
        return "IESNS";
    }

    /**
     * Returns the field parser pattern.
     */
    @Override
    public String parserPattern() {
        return "N[//<DATE2>cS][/S]";
    }

    /**
     * Returns the field validator pattern
     */
    @Override
    public String validatorPattern() {
        return "<AMOUNT>12[//<DATE2><DM>3n][/16x]";
    }

    /**
     * Given a component number it returns true if the component is optional,
     * regardless of the field being mandatory in a particular message.<br>
     * Being the field's value conformed by a composition of one or several
     * internal component values, the field may be present in a message with
     * a proper value but with some of its internal components not set.
     *
     * @param component component number, first component of a field is referenced as 1
     * @return true if the component is optional for this field, false otherwise
     */
    @Override
    public boolean isOptional(int component) {
        // Only the Rate (component 1) is mandatory.
        if (component == 2) {
            return true;
        }
        if (component == 3) {
            return true;
        }
        if (component == 4) {
            return true;
        }
        if (component == 5) {
            return true;
        }
        return false;
    }

    /**
     * Returns true if the field is a GENERIC FIELD as specified by the standard.
     * @return true if the field is generic, false otherwise
     */
    @Override
    public boolean isGeneric() {
        return false;
    }

    /**
     * Returns the defined amount of components.<br>
     * This is not the amount of components present in the field instance, but the total amount of components
     * that this field accepts as defined.
     * @since 7.7
     */
    @Override
    public int componentsSize() {
        return 5;
    }

    /**
     * Returns english label for components.
     * <br>
     * The index in the list is in sync with specific field component structure.
     * @see #getComponentLabel(int)
     * @since 7.8.4
     */
    @Override
    public List<String> getComponentLabels() {
        List<String> result = new ArrayList<>();
        result.add("Rate");
        result.add("End Date");
        result.add("Period");
        result.add("Number");
        result.add("Information");
        return result;
    }

    /**
     * Returns a mapping between component numbers and their label in camel case format.
     * @since 7.10.3
     */
    @Override
    protected Map<Integer, String> getComponentMap() {
        Map<Integer, String> result = new HashMap<>();
        result.put(1, "rate");
        result.put(2, "endDate");
        result.put(3, "period");
        result.put(4, "number");
        result.put(5, "information");
        return result;
    }

    /**
     * Gets the component 1 (Rate).
     * @return the component 1
     */
    public String getComponent1() {
        return getComponent(1);
    }

    /**
     * Get the component 1 as BigDecimal
     *
     * @return the component 1 converted to BigDecimal or null if cannot be converted
     * @since 9.2.7
     */
    public java.math.BigDecimal getComponent1AsBigDecimal() {
        return SwiftFormatUtils.getBigDecimal(getComponent(1));
    }

    /**
     * Get the component 1 as Number (BigDecimal)
     *
     * The value is returned as BigDecimal to keep compatibility with previous API. You should
     * use <code>getComponent1AsBigDecimal()</code> to get the proper value.
     *
     * @return the component 1 converted to Number (BigDecimal) or null if cannot be converted
     * @see #getComponent1AsBigDecimal()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Number getComponent1AsNumber() {
        return getComponent1AsBigDecimal();
    }

    /**
     * Gets the Rate (component 1).
     * @return the Rate from component 1
     */
    public String getRate() {
        return getComponent1();
    }

    /**
     * Get the Rate (component 1) as BigDecimal
     * @return the Rate from component 1 converted to BigDecimal or null if cannot be converted
     * @since 9.2.7
     */
    public java.math.BigDecimal getRateAsBigDecimal() {
        return getComponent1AsBigDecimal();
    }

    /**
     * Get the Rate (component 1) as Number (BigDecimal)
     *
     * The value is returned as BigDecimal to keep compatibility with previous API. You should
     * use <code>getComponent1AsBigDecimal()</code> to get the proper value.
     *
     * @return the component 1 converted to Number (BigDecimal) or null if cannot be converted
     * @see #getRateAsBigDecimal()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Number getRateAsNumber() {
        return getComponent1AsNumber();
    }

    /**
     * Gets the component 2 (End Date).
     * @return the component 2
     */
    public String getComponent2() {
        return getComponent(2);
    }

    /**
     * Get the component 2 as Calendar
     *
     * @return the component 2 converted to Calendar or null if cannot be converted
     */
    public java.util.Calendar getComponent2AsCalendar() {
        return SwiftFormatUtils.getDate2(getComponent(2));
    }

    /**
     * Gets the End Date (component 2).
     * @return the End Date from component 2
     */
    public String getEndDate() {
        return getComponent2();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's End Date
     * @see #getEndDate()
     * @since 9.2.7
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public String getDate() {
        return getEndDate();
    }

    /**
     * Get the End Date (component 2) as Calendar
     * @return the End Date from component 2 converted to Calendar or null if cannot be converted
     */
    public java.util.Calendar getEndDateAsCalendar() {
        return getComponent2AsCalendar();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's End Date as Calendar
     * @see #getEndDateAsCalendar()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.util.Calendar getDateAsCalendar() {
        return getEndDateAsCalendar();
    }

    /**
     * Gets the component 3 (Period).
     * @return the component 3
     */
    public String getComponent3() {
        return getComponent(3);
    }

    /**
     * Gets the Period (component 3).
     * @return the Period from component 3
     */
    public String getPeriod() {
        return getComponent3();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's Period
     * @see #getPeriod()
     * @since 9.2.7
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public String getDMMark() {
        return getPeriod();
    }

    /**
     * Gets the component 4 (Number).
     * @return the component 4
     */
    public String getComponent4() {
        return getComponent(4);
    }

    /**
     * Get the component 4 as Long
     *
     * @return the component 4 converted to Long or null if cannot be converted
     * @since 9.2.7
     */
    public java.lang.Long getComponent4AsLong() {
        return SwiftFormatUtils.getLong(getComponent(4));
    }

    /**
     * Get the component 4 as Number (BigDecimal)
     *
     * The value is returned as BigDecimal to keep compatibility with previous API. You should
     * use <code>getComponent4AsLong()</code> to get the proper value.
     *
     * @return the component 4 converted to Number (BigDecimal) or null if cannot be converted
     * @see #getComponent4AsLong()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Number getComponent4AsNumber() {
        Long l = getComponent4AsLong();
        return l != null ? new BigDecimal(l) : null;
    }

    /**
     * Gets the Number (component 4).
     * @return the Number from component 4
     */
    public String getNumber() {
        return getComponent4();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's Number
     * @see #getNumber()
     * @since 9.2.7
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public String getNumberofDaysMonths() {
        return getNumber();
    }

    /**
     * Get the Number (component 4) as Long
     * @return the Number from component 4 converted to Long or null if cannot be converted
     * @since 9.2.7
     */
    public java.lang.Long getNumberAsLong() {
        return getComponent4AsLong();
    }

    /**
     * Get the Number (component 4) as Number (BigDecimal)
     *
     * The value is returned as BigDecimal to keep compatibility with previous API. You should
     * use <code>getComponent4AsLong()</code> to get the proper value.
     *
     * @return the component 4 converted to Number (BigDecimal) or null if cannot be converted
     * @see #getNumberAsLong()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Number getNumberAsNumber() {
        return getComponent4AsNumber();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's Number as Long
     * @see #getNumberAsLong()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Long getNumberofDaysMonthsAsLong() {
        return getNumberAsLong();
    }

    /**
     * Alternative <em>DEPRECATED</em> method getter for field's Number (component 4) as Number (BigDecimal)
     *
     * The value is returned as BigDecimal to keep compatibility with previous API. You should
     * use <code>getComponent4AsLong()</code> to get the proper value.
     *
     * @return the component 4 converted to Number (BigDecimal) or null if cannot be converted
     * @see #getNumberAsLong()
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public java.lang.Number getNumberofDaysMonthsAsNumber() {
        return getNumberAsNumber();
    }

    /**
     * Gets the component 5 (Information).
     * @return the component 5
     */
    public String getComponent5() {
        return getComponent(5);
    }

    /**
     * Gets the Information (component 5).
     * @return the Information from component 5
     */
    public String getInformation() {
        return getComponent5();
    }

    /**
     * Set the component 1 (Rate).
     *
     * @param component1 the Rate to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent1(String component1) {
        setComponent(1, component1);
        return this;
    }

    /**
     * Set the component1 from a BigDecimal object.
     * <br>
     * Parses the BigDecimal into a SWIFT amount with truncated zero decimals and mandatory decimal separator.
     * <ul>
     *   <li>Example: 1234.00 -&gt; 1234,</li>
     *   <li>Example: 1234 -&gt; 1234,</li>
     *   <li>Example: 1234.56 -&gt; 1234,56</li>
     * </ul>
     * @since 9.2.7
     *
     * @param component1 the BigDecimal with the Rate content to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent1(java.math.BigDecimal component1) {
        setComponent(1, SwiftFormatUtils.getBigDecimal(component1));
        return this;
    }

    /**
     * Alternative method setter for field's Rate (component 1) as Number
     *
     * This method supports java constant value boxing for simpler coding styles (ex: 10.0 becomes a Float)
     *
     * @param component1 the Number with the Rate content to set
     * @return the field object to enable build pattern
     * @see #setRate(java.math.BigDecimal)
     */
    public Field37A setComponent1(java.lang.Number component1) {
        // NOTE: remember instanceof implicitly checks for non-null
        if (component1 instanceof BigDecimal) {
            setComponent(1, SwiftFormatUtils.getBigDecimal((BigDecimal) component1));
        } else if (component1 instanceof BigInteger) {
            setComponent(1, SwiftFormatUtils.getBigDecimal(new BigDecimal((BigInteger) component1)));
        } else if (component1 instanceof Long || component1 instanceof Integer) {
            setComponent(1, SwiftFormatUtils.getBigDecimal(BigDecimal.valueOf(component1.longValue())));
        } else if (component1 != null) {
            // it's other non-null Number (Float, Double, etc...)
            setComponent(1, SwiftFormatUtils.getBigDecimal(BigDecimal.valueOf(component1.doubleValue())));
        } else {
            // explicitly set component as null
            setComponent(1, null);
        }
        return this;
    }

    /**
     * Set the Rate (component 1).
     *
     * @param component1 the Rate to set
     * @return the field object to enable build pattern
     */
    public Field37A setRate(String component1) {
        return setComponent1(component1);
    }

    /**
     * Set the Rate (component 1) from a BigDecimal object.
     *
     * @see #setComponent1(java.math.BigDecimal)
     *
     * @param component1 BigDecimal with the Rate content to set
     * @return the field object to enable build pattern
     * @since 9.2.7
     */
    public Field37A setRate(java.math.BigDecimal component1) {
        return setComponent1(component1);
    }

    /**
     * Alternative method setter for field's Rate (component 1) as Number
     *
     * This method supports java constant value boxing for simpler coding styles (ex: 10 becomes an Integer)
     *
     * @param component1 the Number with the Rate content to set
     * @return the field object to enable build pattern
     * @see #setRate(java.math.BigDecimal)
     */
    public Field37A setRate(java.lang.Number component1) {
        return setComponent1(component1);
    }

    /**
     * Set the component 2 (End Date).
     *
     * @param component2 the End Date to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent2(String component2) {
        setComponent(2, component2);
        return this;
    }

    /**
     * Set the component2 from a Calendar object.
     *
     * @param component2 the Calendar with the End Date content to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent2(java.util.Calendar component2) {
        setComponent(2, SwiftFormatUtils.getDate2(component2));
        return this;
    }

    /**
     * Set the End Date (component 2).
     *
     * @param component2 the End Date to set
     * @return the field object to enable build pattern
     */
    public Field37A setEndDate(String component2) {
        return setComponent2(component2);
    }

    /**
     * Set the End Date (component 2) from a Calendar object.
     *
     * @see #setComponent2(java.util.Calendar)
     *
     * @param component2 Calendar with the End Date content to set
     * @return the field object to enable build pattern
     */
    public Field37A setEndDate(java.util.Calendar component2) {
        return setComponent2(component2);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's End Date
     *
     * @see #setEndDate(String)
     *
     * @param component2 the End Date to set
     * @return the field object to enable build pattern
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setDate(String component2) {
        return setEndDate(component2);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's End Date from a Calendar object.
     *
     * @see #setComponent2(java.util.Calendar)
     *
     * @param component2 Calendar with the End Date content to set
     * @return the field object to enable build pattern
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setDate(java.util.Calendar component2) {
        return setEndDate(component2);
    }

    /**
     * Set the component 3 (Period).
     *
     * @param component3 the Period to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent3(String component3) {
        setComponent(3, component3);
        return this;
    }

    /**
     * Set the Period (component 3).
     *
     * @param component3 the Period to set
     * @return the field object to enable build pattern
     */
    public Field37A setPeriod(String component3) {
        return setComponent3(component3);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's Period
     *
     * @see #setPeriod(String)
     *
     * @param component3 the Period to set
     * @return the field object to enable build pattern
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setDMMark(String component3) {
        return setPeriod(component3);
    }

    /**
     * Set the component 4 (Number).
     *
     * @param component4 the Number to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent4(String component4) {
        setComponent(4, component4);
        return this;
    }

    /**
     * Set the component4 from a Long object.
     * <br>
     * <em>If the component being set is a fixed length number, the argument will not be
     * padded.</em> It is recommended for these cases to use the setComponent4(String)
     * method.
     *
     * @see #setComponent4(String)
     * @since 9.2.7
     *
     * @param component4 the Long with the Number content to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent4(java.lang.Long component4) {
        setComponent(4, SwiftFormatUtils.getLong(component4));
        return this;
    }

    /**
     * Alternative method setter for field's Number (component 4) as Number
     *
     * This method supports java constant value boxing for simpler coding styles (ex: 10 becomes an Integer)
     *
     * @param component4 the Number with the Number content to set
     * @return the field object to enable build pattern
     * @see #setNumber(java.lang.Long)
     */
    public Field37A setComponent4(java.lang.Number component4) {
        // NOTE: remember instanceof implicitly checks for non-null
        if (component4 instanceof Long) {
            setComponent(4, SwiftFormatUtils.getLong((Long) component4));
        } else if (component4 instanceof BigInteger || component4 instanceof Integer) {
            setComponent(4, SwiftFormatUtils.getLong(component4.longValue()));
        } else if (component4 != null) {
            // it's another non-null Number (Float, Double, BigDecimal, etc...)
            setComponent(4, SwiftFormatUtils.getLong(component4.longValue()));
        } else {
            // explicitly set component as null
            setComponent(4, null);
        }
        return this;
    }

    /**
     * Set the Number (component 4).
     *
     * @param component4 the Number to set
     * @return the field object to enable build pattern
     */
    public Field37A setNumber(String component4) {
        return setComponent4(component4);
    }

    /**
     * Set the Number (component 4) from a Long object.
     *
     * @see #setComponent4(java.lang.Long)
     *
     * @param component4 Long with the Number content to set
     * @return the field object to enable build pattern
     * @since 9.2.7
     */
    public Field37A setNumber(java.lang.Long component4) {
        return setComponent4(component4);
    }

    /**
     * Alternative method setter for field's Number (component 4) as Number
     *
     * This method supports java constant value boxing for simpler coding styles (ex: 10 becomes an Integer)
     *
     * @param component4 the Number with the Number content to set
     * @return the field object to enable build pattern
     * @see #setNumber(java.lang.Long)
     */
    public Field37A setNumber(java.lang.Number component4) {
        return setComponent4(component4);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's Number
     *
     * @see #setNumber(String)
     *
     * @param component4 the Number to set
     * @return the field object to enable build pattern
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setNumberofDaysMonths(String component4) {
        return setNumber(component4);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's Number from a Long object.
     *
     * @see #setComponent4(java.lang.Long)
     *
     * @param component4 Long with the Number content to set
     * @return the field object to enable build pattern
     * @since 9.2.7
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setNumberofDaysMonths(java.lang.Long component4) {
        return setNumber(component4);
    }

    /**
     * Alternative <em>DEPRECATED</em> method setter for field's Number (component 4) as Number
     *
     * This method supports java constant value boxing for simpler coding styles (ex: 10 becomes an Integer)
     *
     * @param component4 the Number with the Number content to set
     * @return the field object to enable build pattern
     * @see #setNumber(java.lang.Long)
     */
    @Deprecated
    @ProwideDeprecated(phase2 = TargetYear.SRU2022)
    public Field37A setNumberofDaysMonths(java.lang.Number component4) {
        return setNumber(component4);
    }

    /**
     * Set the component 5 (Information).
     *
     * @param component5 the Information to set
     * @return the field object to enable build pattern
     */
    public Field37A setComponent5(String component5) {
        setComponent(5, component5);
        return this;
    }

    /**
     * Set the Information (component 5).
     *
     * @param component5 the Information to set
     * @return the field object to enable build pattern
     */
    public Field37A setInformation(String component5) {
        return setComponent5(component5);
    }

    /**
     * Returns all components that can be converted to a Calendar
     *
     * @return the list of converted components (a Calendar object or null)
     */
    public List<Calendar> dates() {
        return DateResolver.dates(this);
    }

    /**
     * Returns the first component that can be converted to a Calendar
     *
     * @return the converted components (a Calendar object or null)
     */
    public Calendar date() {
        return DateResolver.date(this);
    }

    /**
     * Returns the list of all NON-NULL amounts as BigDecimal
     *
     * @return the list of NON-NULL amounts as BigDecimal values
     * @see AmountResolver#amounts(Field)
     */
    public List<BigDecimal> amounts() {
        return AmountResolver.amounts(this);
    }

    /**
     * Returns the first amount as BigDecimal
     *
     * @return the first amount as BigDecimal value. Can be null
     * @see AmountResolver#amount(Field)
     */
    public BigDecimal amount() {
        return AmountResolver.amount(this);
    }

    /**
     * Returns the field's name composed by the field number and the letter option (if any).
     * @return the static value of Field37A.NAME
     */
    @Override
    public String getName() {
        return NAME;
    }

    /**
     * Gets the first occurrence form the tag list or null if not found.
     * @return null if not found o block is null or empty
     * @param block may be null or empty
     */
    public static Field37A get(final SwiftTagListBlock block) {
        if (block == null || block.isEmpty()) {
            return null;
        }
        final Tag t = block.getTagByName(NAME);
        if (t == null) {
            return null;
        }
        return new Field37A(t);
    }

    /**
     * Gets the first instance of Field37A in the given message.
     * @param msg may be empty or null
     * @return null if not found or msg is empty or null
     * @see #get(SwiftTagListBlock)
     */
    public static Field37A get(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return null;
        }
        return get(msg.getBlock4());
    }

    /**
     * Gets a list of all occurrences of the field Field37A in the given message;
     * an empty list is returned if none found.
     * @param msg may be empty or null in which case an empty list is returned
     * @see #getAll(SwiftTagListBlock)
     */
    public static List<Field37A> getAll(final SwiftMessage msg) {
        if (msg == null || msg.getBlock4() == null || msg.getBlock4().isEmpty()) {
            return java.util.Collections.emptyList();
        }
        return getAll(msg.getBlock4());
    }

    /**
     * Gets a list of all occurrences of the field Field37A from the given block;
     * an empty list is returned if none found.
     *
     * @param block may be empty or null in which case an empty list is returned
     */
    public static List<Field37A> getAll(final SwiftTagListBlock block) {
        final List<Field37A> result = new ArrayList<>();
        if (block == null || block.isEmpty()) {
            return result;
        }
        final Tag[] arr = block.getTagsByName(NAME);
        if (arr != null && arr.length > 0) {
            for (final Tag f : arr) {
                result.add(new Field37A(f));
            }
        }
        return result;
    }

    /**
     * This method deserializes the JSON data into a Field37A object.
     * @param json JSON structure including tuples with label and value for all field components
     * @return a new field instance with the JSON data parsed into field components or an empty field if the JSON is invalid
     * @since 7.10.3
     * @see Field#fromJson(String)
     */
    public static Field37A fromJson(final String json) {

        final Field37A field = new Field37A();

        final JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();

        // **** COMPONENT 1 - Rate

        if (jsonObject.get("rate") != null) {
            field.setComponent1(jsonObject.get("rate").getAsString());
        }

        // **** COMPONENT 2 - End Date

        // first try using alias's names (including deprecated ones, if any)
        if (jsonObject.get("date") != null) {
            field.setComponent2(jsonObject.get("date").getAsString());
        }

        // last try using the official component's name (overwrites alternatives and DEPRECATED)
        if (jsonObject.get("endDate") != null) {
            field.setComponent2(jsonObject.get("endDate").getAsString());
        }

        // **** COMPONENT 3 - Period

        // first try using alias's names (including deprecated ones, if any)
        if (jsonObject.get("dMMark") != null) {
            field.setComponent3(jsonObject.get("dMMark").getAsString());
        }

        // last try using the official component's name (overwrites alternatives and DEPRECATED)
        if (jsonObject.get("period") != null) {
            field.setComponent3(jsonObject.get("period").getAsString());
        }

        // **** COMPONENT 4 - Number

        // first try using alias's names (including deprecated ones, if any)
        if (jsonObject.get("numberofDaysMonths") != null) {
            field.setComponent4(jsonObject.get("numberofDaysMonths").getAsString());
        }

        // last try using the official component's name (overwrites alternatives and DEPRECATED)
        if (jsonObject.get("number") != null) {
            field.setComponent4(jsonObject.get("number").getAsString());
        }

        // **** COMPONENT 5 - Information

        if (jsonObject.get("information") != null) {
            field.setComponent5(jsonObject.get("information").getAsString());
        }

        return field;
    }

}
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.components.content_capture;

import android.content.Context;
import android.os.Build;
import android.view.View;
import android.view.ViewStructure;

import androidx.annotation.VisibleForTesting;

import org.chromium.base.Log;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeMethods;
import org.chromium.content_public.browser.RenderCoordinates;
import org.chromium.content_public.browser.WebContents;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * This class receives captured content from native and forwards it to the registered
 * ContentCaptureConsumer instances.
 */
@JNINamespace("content_capture")
public class OnscreenContentProvider {
    private static final String TAG = "ContentCapture";

    // Cached on first construction; when true, every capture event is logged.
    private static Boolean sDump;

    // Pointer to the native OnscreenContentProviderAndroid; 0 while the native side
    // has not been created or after it has been destroyed.
    private long mNativeOnscreenContentProviderAndroid;

    private final ArrayList<ContentCaptureConsumer> mContentCaptureConsumers =
            new ArrayList<ContentCaptureConsumer>();
    private WeakReference<WebContents> mWebContents;

    public OnscreenContentProvider(
            Context context, View view, ViewStructure structure, WebContents webContents) {
        mWebContents = new WeakReference<WebContents>(webContents);
        if (sDump == null) sDump = ContentCaptureFeatures.isDumpForTestingEnabled();
        // The platform ContentCapture service only exists on Android Q and above.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            ContentCaptureConsumer consumer =
                    PlatformContentCaptureConsumer.create(context, view, structure, webContents);
            if (consumer != null) {
                mContentCaptureConsumers.add(consumer);
            }
        }
        if (ContentCaptureFeatures.shouldTriggerContentCaptureForExperiment()) {
            mContentCaptureConsumers.add(new ExperimentContentCaptureConsumer());
        }
        // Only create the native capture pipeline when someone is listening.
        if (!mContentCaptureConsumers.isEmpty()) {
            createNativeObject();
        }
    }

    public OnscreenContentProvider(Context context, View view, WebContents webContents) {
        this(context, view, null, webContents);
    }

    /** Releases the native object; safe to call when it was never created. */
    public void destroy() {
        destroyNativeObject();
    }

    private void destroyNativeObject() {
        if (mNativeOnscreenContentProviderAndroid == 0) return;
        OnscreenContentProviderJni.get().destroy(mNativeOnscreenContentProviderAndroid);
        mNativeOnscreenContentProviderAndroid = 0;
    }

    private void createNativeObject() {
        WebContents webContents = mWebContents.get();
        if (webContents != null) {
            mNativeOnscreenContentProviderAndroid =
                    OnscreenContentProviderJni.get().init(this, webContents);
        }
    }

    /** Registers a consumer, creating the native pipeline if it does not exist yet. */
    public void addConsumer(ContentCaptureConsumer consumer) {
        mContentCaptureConsumers.add(consumer);
        if (mNativeOnscreenContentProviderAndroid == 0) createNativeObject();
    }

    /** Unregisters a consumer, tearing down the native pipeline when none remain. */
    public void removeConsumer(ContentCaptureConsumer consumer) {
        mContentCaptureConsumers.remove(consumer);
        if (mContentCaptureConsumers.isEmpty()) destroyNativeObject();
    }

    /** Points this provider (and its native peer, if any) at a new WebContents. */
    public void onWebContentsChanged(WebContents current) {
        mWebContents = new WeakReference<WebContents>(current);
        if (mNativeOnscreenContentProviderAndroid != 0) {
            OnscreenContentProviderJni.get().onWebContentsChanged(
                    mNativeOnscreenContentProviderAndroid, current);
        }
    }

    @CalledByNative
    private void didCaptureContent(Object[] session, ContentCaptureFrame data) {
        FrameSession frameSession = toFrameSession(session);
        String[] urls = buildUrls(frameSession, data);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onContentCaptured(frameSession, data);
            }
        }
        if (sDump) Log.i(TAG, "Captured Content: %s", data);
    }

    @CalledByNative
    private void didUpdateContent(Object[] session, ContentCaptureFrame data) {
        FrameSession frameSession = toFrameSession(session);
        String[] urls = buildUrls(frameSession, data);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onContentUpdated(frameSession, data);
            }
        }
        if (sDump) Log.i(TAG, "Updated Content: %s", data);
    }

    @CalledByNative
    private void didRemoveContent(Object[] session, long[] data) {
        FrameSession frameSession = toFrameSession(session);
        String[] urls = buildUrls(frameSession, null);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onContentRemoved(frameSession, data);
            }
        }
        // NOTE(review): native is assumed to always pass a non-empty session here —
        // frameSession.get(0) would throw otherwise; confirm against the native caller.
        if (sDump) {
            Log.i(TAG, "Removed Content: %s", frameSession.get(0) + " " + Arrays.toString(data));
        }
    }

    @CalledByNative
    private void didRemoveSession(Object[] session) {
        FrameSession frameSession = toFrameSession(session);
        String[] urls = buildUrls(frameSession, null);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onSessionRemoved(frameSession);
            }
        }
        if (sDump) Log.i(TAG, "Removed Session: %s", frameSession.get(0));
    }

    @CalledByNative
    private void didUpdateTitle(ContentCaptureFrame mainFrame) {
        String[] urls = buildUrls(null, mainFrame);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onTitleUpdated(mainFrame);
            }
        }
        if (sDump) Log.i(TAG, "Updated Title: %s", mainFrame.getTitle());
    }

    @CalledByNative
    private void didUpdateFavicon(ContentCaptureFrame mainFrame) {
        String[] urls = buildUrls(null, mainFrame);
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) {
                consumer.onFaviconUpdated(mainFrame);
            }
        }
        if (sDump) Log.i(TAG, "Updated Favicon: %s", mainFrame.getFavicon());
    }

    @CalledByNative
    private int getOffsetY(WebContents webContents) {
        return RenderCoordinates.fromWebContents(webContents).getContentOffsetYPixInt();
    }

    /** @return true if any registered consumer wants to capture the given URL. */
    @CalledByNative
    private boolean shouldCapture(String url) {
        String[] urls = new String[] {url};
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer.shouldCapture(urls)) return true;
        }
        return false;
    }

    /** Converts the untyped array passed over JNI into a typed FrameSession. */
    private FrameSession toFrameSession(Object[] session) {
        FrameSession frameSession = new FrameSession(session.length);
        for (Object s : session) frameSession.add((ContentCaptureFrame) s);
        return frameSession;
    }

    /**
     * Collects the URL of every frame in {@code session}, plus {@code data}'s URL when
     * present, for consumer filtering. Either argument may be null.
     */
    private String[] buildUrls(FrameSession session, ContentCaptureFrame data) {
        ArrayList<String> urls = new ArrayList<String>();
        if (session != null) {
            for (ContentCaptureFrame d : session) {
                urls.add(d.getUrl());
            }
        }
        if (data != null) urls.add(data.getUrl());
        // Idiomatic empty-array form: the runtime allocates the correctly-sized array.
        return urls.toArray(new String[0]);
    }

    @VisibleForTesting(otherwise = VisibleForTesting.NONE)
    public List<ContentCaptureConsumer> getConsumersForTesting() {
        return mContentCaptureConsumers;
    }

    @VisibleForTesting(otherwise = VisibleForTesting.NONE)
    public void removePlatformConsumerForTesting() {
        // Returning immediately after the removal keeps the in-loop mutation safe
        // (the iterator is never advanced afterwards).
        for (ContentCaptureConsumer consumer : mContentCaptureConsumers) {
            if (consumer instanceof PlatformContentCaptureConsumer) {
                mContentCaptureConsumers.remove(consumer);
                return;
            }
        }
    }

    @NativeMethods
    interface Natives {
        long init(OnscreenContentProvider caller, WebContents webContents);
        void onWebContentsChanged(
                long nativeOnscreenContentProviderAndroid, WebContents webContents);
        void destroy(long nativeOnscreenContentProviderAndroid);
    }
}
package osu.crowd_ml.trainers;

import android.util.Log;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import osu.crowd_ml.BuildConfig;
import osu.crowd_ml.Parameters;
import osu.crowd_ml.TrainingDataIO;
import osu.crowd_ml.loss_functions.LossFunction;
import osu.crowd_ml.noise_distributions.Distribution;
import osu.crowd_ml.utils.ArrayUtils;

/*
Copyright 2017 Crowd-ML team


Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License
*/

/**
 * Singleton {@link Trainer} that runs noisy gradient-descent training on the device.
 *
 * <p>Not thread-safe: callers are expected to drive it from a single training thread and
 * use {@link Thread#interrupt()} to request early termination; interrupted work rolls
 * back the sample-order state so no samples are silently consumed.
 */
public class InternalTrainer implements Trainer {

    private static InternalTrainer instance = null;

    // Indices of training samples not yet consumed in the current epoch; refilled and
    // reshuffled by maintainSampleOrder() whenever it runs empty.
    private List<Integer> order;
    private Parameters params;
    private List<Double> weights;
    // Global iteration counter used by the decaying learning-rate schedules.
    private int t;
    // Number of model weights; cached from weights.size() in setWeights().
    private int length;

    private InternalTrainer() {}

    /** @return the single shared trainer instance, creating it on first use. */
    public static Trainer getInstance() {
        if (instance == null) {
            instance = new InternalTrainer();
        }
        return instance;
    }

    /**
     * Computes one noisy batch gradient without updating the weights.
     *
     * @return the gradient of the current batch with privacy noise added
     */
    public List<Double> getNoisyGrad() {
        maintainSampleOrder(); // this line ensures that order is never null or empty

        // Cache the order so it can be restored if the thread is interrupted.
        List<Integer> oldOrder = new ArrayList<>(order);

        List<Double> noisyGrad = computeNoisyGrad();

        if (Thread.currentThread().isInterrupted()) {
            order = oldOrder;
        }

        return noisyGrad;
    }

    /**
     * Runs {@code numIterations} gradient-descent steps, updating the cached weights
     * after each step. Stops early (and rolls back the sample order) on interruption.
     *
     * @param numIterations number of training steps to run.
     * @return The updated weight matrix.
     */
    @Override
    public List<Double> train(final int numIterations) {
        maintainSampleOrder(); // This line ensures order is never null or empty

        // Cache the old order list if we need to rollback changes.
        List<Integer> oldOrder = new ArrayList<>(order);

        for (int i = 0; i < numIterations; i++) {
            if (Thread.currentThread().isInterrupted()) {
                break;
            }

            // Compute the gradient with random noise added.
            List<Double> noisyGrad = computeNoisyGrad();

            // Apply the descent update to produce the new weights.
            weights = calcWeight(noisyGrad);
            Log.d("sendWeight", "local iter: " + (i + 1));
        }

        // Thread was stopped early.
        if (Thread.currentThread().isInterrupted()) {
            order = oldOrder;
        }

        return weights;
    }

    /** Sets the global iteration counter used by the decaying schedules. */
    @Override
    public Trainer setIter(int t) {
        this.t = t;
        return getInstance();
    }

    /** Sets the weight vector; must be non-empty. Also caches its length. */
    @Override
    public Trainer setWeights(List<Double> weights) {
        if (BuildConfig.DEBUG && weights.isEmpty()) throw new AssertionError();
        this.weights = weights;
        this.length = weights.size();
        return getInstance();
    }

    @Override
    public Trainer setParams(Parameters params) {
        this.params = params;
        return getInstance();
    }

    /** Releases all state and discards the singleton instance. */
    @Override
    public void destroy() {
        order = null;
        weights = null;
        params = null;
        instance = null;
    }

    /**
     * Applies one descent update to the current weights using the configured algorithm.
     *
     * @param grad the (noisy) gradient to descend along
     * @return the new weight vector; never mutates {@code weights} in place
     */
    private List<Double> calcWeight(List<Double> grad) {
        double c = params.getC();
        double epsilon = params.getEps();
        String descentAlg = params.getDescentAlg();
        List<Double> newWeight = new ArrayList<>(length);

        // Per-coordinate rate buffer, only needed by the adaptive methods.
        // NOTE(review): this buffer is re-created (zeroed) on every call, so adagrad and
        // rmsProp never accumulate squared-gradient history across training steps the way
        // the textbook algorithms do — confirm whether that is intentional.
        double[] learningRate = null;
        if (descentAlg.equals("adagrad") || descentAlg.equals("rmsProp")) {
            learningRate = new double[length];
        }

        for (int i = 0; i < length; i++) {
            if (Thread.currentThread().isInterrupted()) {
                break;
            }

            double deltaW;
            switch (descentAlg) {
                case "constant":
                    deltaW = c * grad.get(i);
                    break;
                case "simple":
                    deltaW = (c / t) * grad.get(i);
                    break;
                case "sqrt":
                    deltaW = (c / Math.sqrt(t)) * grad.get(i);
                    break;
                case "adagrad": {
                    double adagradRate = learningRate[i] + grad.get(i) * grad.get(i);
                    learningRate[i] = c / Math.sqrt(adagradRate + epsilon);
                    deltaW = learningRate[i] * grad.get(i);
                    break;
                }
                case "rmsProp": {
                    double rmsRate = 0.9 * learningRate[i] + 0.1 * grad.get(i) * grad.get(i);
                    learningRate[i] = c / Math.sqrt(rmsRate + epsilon);
                    deltaW = learningRate[i] * grad.get(i);
                    break;
                }
                default:
                    Log.e("InternalTrainer", "Invalid descent algorithm. Defaulting to \'simple\'.");
                    deltaW = (c / t) * grad.get(i);
                    break;
            }

            newWeight.add(i, weights.get(i) - deltaW);
        }
        return newWeight;
    }

    /**
     * Computes the batch-averaged gradient and perturbs each coordinate with noise drawn
     * from the configured privacy distribution.
     */
    private List<Double> computeNoisyGrad() {
        // Init training sample batch.
        int[] batchSamples = gatherBatchSamples();

        // TODO(tylermzeller) this is a bottleneck on physical devices. Buffered file I/O seems to
        // invoke the GC often.
        // Get training sample features.
        List<double[]> xBatch = TrainingDataIO.getInstance().readSamples(batchSamples, params);

        // Get training sample labels.
        List<Integer> yBatch = TrainingDataIO.getInstance().readLabels(batchSamples, params);

        // Compute average gradient vector.
        List<Double> avgGrad = computeAverageGrad(xBatch, yBatch);

        // Init empty noisy gradient vector.
        List<Double> noisyGrad = new ArrayList<>(length);

        // Add random noise probed from the client's noise distribution.
        Distribution dist = params.getNoiseDistribution();
        for (int i = 0; i < length; i++) {
            double avg = avgGrad.get(i);
            if (Thread.currentThread().isInterrupted()) {
                break;
            }
            noisyGrad.add(i, dist.noise(avg, params.getNoiseScale()));
        }
        return noisyGrad;
    }

    /**
     * Draws a batch of sample indices without replacement from the epoch order list.
     *
     * @return sorted array of {@code clientBatchSize} sample indices
     */
    private int[] gatherBatchSamples() {
        int batchSize = params.getClientBatchSize();
        int[] batchSamples = new int[batchSize];

        Random r = new Random(); // rng

        for (int i = 0; i < batchSize; i++) {
            // Calling this method here ensures that the order list is never empty. When the order
            // list becomes empty, a new epoch of training occurs as the list is repopulated with
            // random int values in the range [0, N).
            maintainSampleOrder();

            // Get a random index in the range [0, |order|) to query the order list.
            int q = r.nextInt(order.size());

            // Remove the value at index q and add it to the current batch of samples.
            batchSamples[i] = order.remove(q);
        }
        ArrayUtils.sort(batchSamples);
        return batchSamples;
    }

    /**
     * Computes the gradient of the loss averaged over the given batch.
     *
     * @param X batch feature vectors
     * @param Y batch labels
     * @return gradient vector of size {@code length}, averaged over the batch
     */
    private List<Double> computeAverageGrad(List<double[]> X, List<Integer> Y) {
        int batchSize = params.getClientBatchSize();
        LossFunction loss = params.getLossFunction();
        int D = params.getD();
        int K = params.getK();
        double L = params.getL();
        int nh = params.getNH();

        // Init average gradient vector.
        List<Double> avgGrad = new ArrayList<>(Collections.nCopies(length, 0.0d));

        double[] x;
        List<Double> grad;
        for (int i = 0; i < batchSize; i++) {
            // Periodically check if this thread has been interrupted. See the javadocs on
            // threading for best practices.
            if (Thread.currentThread().isInterrupted()) {
                break;
            }

            x = X.get(i); // current sample feature
            int y = Y.get(i); // current label

            // Compute the gradient.
            grad = loss.gradient(weights, x, y, D, K, L, nh);

            // BUG FIX: the previous code computed (runningSum + g_i) / batchSize on every
            // sample, which repeatedly divides the accumulator and exponentially decays the
            // contribution of earlier samples instead of averaging. Accumulating
            // g_i / batchSize yields the true batch mean.
            for (int j = 0; j < length; j++) {
                avgGrad.set(j, avgGrad.get(j) + grad.get(j) / batchSize);
            }
        }
        return avgGrad;
    }

    /**
     * Maintains the sample order list.
     *
     * The sample order list is queried for random indices of training samples without replacement
     * (until all values are removed, that is).
     *
     * If order is null or empty, the list will be filled with int values in the range [0, N), then
     * shuffled.
     */
    private void maintainSampleOrder() {
        // Step 1. Ensure the order list is initialized.
        if (order == null) {
            order = new ArrayList<>();
        }

        // Step 2. If the order list is empty, fill with values in the range [0, N).
        if (order.isEmpty()) {
            for (int i = 0; i < params.getN(); i++) // create sequential list of input sample #s
                order.add(i);

            // Step 3. Randomize order.
            Collections.shuffle(order);
        }
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.monetdbbulkloader; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.KettleAttributeInterface; import org.pentaho.di.core.ProvidesDatabaseConnectionInformation; import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.database.MonetDBDatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.shared.SharedObjectInterface; 
import org.pentaho.di.trans.DatabaseImpact; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInjectionMetaEntry; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInjectionInterface; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.util.List; /** * Created on 20-feb-2007 * * @author Sven Boden */ public class MonetDBBulkLoaderMeta extends BaseStepMeta implements StepMetaInjectionInterface, StepMetaInterface, ProvidesDatabaseConnectionInformation { private static Class<?> PKG = MonetDBBulkLoaderMeta.class; // for i18n purposes, needed by Translator2!! /** * The database connection name * */ private String dbConnectionName; /** * what's the schema for the target? */ private String schemaName; /** * what's the table for the target? */ private String tableName; /** * Path to the log file */ private String logFile; /** * database connection */ private DatabaseMeta databaseMeta; /** * Field name of the target table */ private String[] fieldTable; /** * Field name in the stream */ private String[] fieldStream; /** * flag to indicate that the format is OK for MonetDB */ private boolean[] fieldFormatOk; /** * Field separator character or string used to delimit fields */ private String fieldSeparator; /** * Specifies which character surrounds each field's data. i.e. double quotes, single quotes or something else */ private String fieldEnclosure; /** * How are NULLs represented as text to the MonetDB API or mclient i.e. can be an empty string or something else the * value is written out as text to the API and MonetDB is able to interpret it to the correct representation of NULL * in the database for the given column type. 
*/ private String NULLrepresentation; /** * Encoding to use */ private String encoding; /** * Truncate table? */ private boolean truncate = false; /** * Fully Quote SQL used in the step? */ private boolean fullyQuoteSQL; /** * Auto adjust the table structure? */ private boolean autoSchema = false; /** * Auto adjust strings that are too long? */ private boolean autoStringWidths = false; public boolean isAutoStringWidths() { return autoStringWidths; } public void setAutoStringWidths( boolean autoStringWidths ) { this.autoStringWidths = autoStringWidths; } public boolean isTruncate() { return truncate; } public void setTruncate( boolean truncate ) { this.truncate = truncate; } public boolean isFullyQuoteSQL() { return fullyQuoteSQL; } public void setFullyQuoteSQL( boolean fullyQuoteSQLbool ) { this.fullyQuoteSQL = fullyQuoteSQLbool; } public boolean isAutoSchema() { return autoSchema; } public void setAutoSchema( boolean autoSchema ) { this.autoSchema = autoSchema; } /** * The number of rows to buffer before passing them over to MonetDB. This number should be non-zero since we need to * specify the number of rows we pass. */ private String bufferSize; /** * The indicator defines that it is used the version of <i>MonetBD Jan2014-SP2</i> or later if it is <code>true</code>. * <code>False</code> indicates about using all MonetDb versions before this one. */ private boolean compatibilityDbVersionMode = false; public MonetDBBulkLoaderMeta() { super(); } /** * @return Returns the database. */ public DatabaseMeta getDatabaseMeta() { return databaseMeta; } /** * @return Returns the database. */ public DatabaseMeta getDatabaseMeta( MonetDBBulkLoader loader ) { return databaseMeta; } /** * @param database The database to set. */ public void setDatabaseMeta( DatabaseMeta database ) { this.databaseMeta = database; } /** * @return Returns the tableName. */ public String getTableName() { return tableName; } /** * @param tableName The tableName to set. 
*/ public void setTableName( String tableName ) { this.tableName = tableName; } /** * @return Returns the fieldTable. */ public String[] getFieldTable() { return fieldTable; } /** * @param fieldTable The fieldTable to set. */ public void setFieldTable( String[] fieldTable ) { this.fieldTable = fieldTable; } /** * @return Returns the fieldStream. */ public String[] getFieldStream() { return fieldStream; } /** * @param fieldStream The fieldStream to set. */ public void setFieldStream( String[] fieldStream ) { this.fieldStream = fieldStream; } public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode, databases ); } public void allocate( int nrvalues ) { fieldTable = new String[nrvalues]; fieldStream = new String[nrvalues]; fieldFormatOk = new boolean[nrvalues]; } public Object clone() { MonetDBBulkLoaderMeta retval = (MonetDBBulkLoaderMeta) super.clone(); int nrvalues = fieldTable.length; retval.allocate( nrvalues ); System.arraycopy( fieldTable, 0, retval.fieldTable, 0, nrvalues ); System.arraycopy( fieldStream, 0, retval.fieldStream, 0, nrvalues ); System.arraycopy( fieldFormatOk, 0, retval.fieldFormatOk, 0, nrvalues ); return retval; } private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException { try { dbConnectionName = XMLHandler.getTagValue( stepnode, "connection" ); databaseMeta = DatabaseMeta.findDatabase( databases, dbConnectionName ); schemaName = XMLHandler.getTagValue( stepnode, "schema" ); tableName = XMLHandler.getTagValue( stepnode, "table" ); bufferSize = XMLHandler.getTagValue( stepnode, "buffer_size" ); logFile = XMLHandler.getTagValue( stepnode, "log_file" ); truncate = "Y".equals( XMLHandler.getTagValue( stepnode, "truncate" ) ); // New in January 2013 Updates - For compatibility we set default values according to the old version of the step. 
// // This expression will only be true if a yes answer was previously recorded. fullyQuoteSQL = "Y".equals( XMLHandler.getTagValue( stepnode, "fully_quote_sql" ) ); fieldSeparator = XMLHandler.getTagValue( stepnode, "field_separator" ); if ( fieldSeparator == null ) { fieldSeparator = "|"; } fieldEnclosure = XMLHandler.getTagValue( stepnode, "field_enclosure" ); if ( fieldEnclosure == null ) { fieldEnclosure = "\""; } NULLrepresentation = XMLHandler.getTagValue( stepnode, "null_representation" ); if ( NULLrepresentation == null ) { NULLrepresentation = "null"; } encoding = XMLHandler.getTagValue( stepnode, "encoding" ); if ( encoding == null ) { encoding = "UTF-8"; } // Old functionality. Always commented out. It may be safe to remove all of th // autoSchema = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_schema")); // autoStringWidths = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_string_widths")); int nrvalues = XMLHandler.countNodes( stepnode, "mapping" ); allocate( nrvalues ); for ( int i = 0; i < nrvalues; i++ ) { Node vnode = XMLHandler.getSubNodeByNr( stepnode, "mapping", i ); fieldTable[i] = XMLHandler.getTagValue( vnode, "stream_name" ); fieldStream[i] = XMLHandler.getTagValue( vnode, "field_name" ); if ( fieldStream[i] == null ) { fieldStream[i] = fieldTable[i]; // default: the same name! } fieldFormatOk[i] = "Y".equalsIgnoreCase( XMLHandler.getTagValue( vnode, "field_format_ok" ) ); } } catch ( Exception e ) { throw new KettleXMLException( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.Exception.UnableToReadStepInfoFromXML" ), e ); } } public void setDefault() { fieldTable = null; databaseMeta = null; schemaName = ""; tableName = BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.DefaultTableName" ); bufferSize = "100000"; logFile = ""; truncate = false; fullyQuoteSQL = true; // MonetDB safe defaults. 
fieldSeparator = "|"; fieldEnclosure = "\""; NULLrepresentation = ""; encoding = "UTF-8"; allocate( 0 ); } public String getXML() { StringBuilder retval = new StringBuilder( 300 ); // General Settings Tab retval.append( " " ).append( XMLHandler.addTagValue( "connection", dbConnectionName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "buffer_size", bufferSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "truncate", truncate ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fully_quote_sql", fullyQuoteSQL ) ); // MonetDB Settings Tab retval.append( " " ).append( XMLHandler.addTagValue( "field_separator", fieldSeparator ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_enclosure", fieldEnclosure ) ); retval.append( " " ).append( XMLHandler.addTagValue( "null_representation", NULLrepresentation ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); // Output Fields Tab for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_format_ok", fieldFormatOk[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); } public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases ); bufferSize = rep.getStepAttributeString( id_step, "buffer_size" ); dbConnectionName = rep.getStepAttributeString( id_step, 
"db_connection_name" ); schemaName = rep.getStepAttributeString( id_step, "schema" ); tableName = rep.getStepAttributeString( id_step, "table" ); logFile = rep.getStepAttributeString( id_step, "log_file" ); // The following default assignments are for backward compatibility with files saved under PDI version 4.4 files fieldSeparator = rep.getStepAttributeString( id_step, "field_separator" ); if ( fieldSeparator == null ) { fieldEnclosure = "\""; } fieldEnclosure = rep.getStepAttributeString( id_step, "field_enclosure" ); if ( fieldEnclosure == null ) { fieldEnclosure = "\""; } NULLrepresentation = rep.getStepAttributeString( id_step, "null_representation" ); if ( NULLrepresentation == null ) { NULLrepresentation = ""; } encoding = rep.getStepAttributeString( id_step, "encoding" ); if ( encoding == null ) { encoding = "UTF-8"; } truncate = Boolean.parseBoolean( rep.getStepAttributeString( id_step, "truncate" ) ); // This expression will only return true if a yes value was previously recorded; false otherwise. 
      fullyQuoteSQL = Boolean.parseBoolean( rep.getStepAttributeString( id_step, "fully_quote_sql" ) );

      int nrvalues = rep.countNrStepAttributes( id_step, "stream_name" );

      allocate( nrvalues );

      // Read the per-field mapping; a missing "field_name" attribute falls back to the
      // value stored under "stream_name" (note the historically swapped attribute names:
      // "stream_name" feeds fieldTable and "field_name" feeds fieldStream).
      for ( int i = 0; i < nrvalues; i++ ) {
        fieldTable[i] = rep.getStepAttributeString( id_step, i, "stream_name" );
        fieldStream[i] = rep.getStepAttributeString( id_step, i, "field_name" );
        if ( fieldStream[i] == null ) {
          fieldStream[i] = fieldTable[i];
        }
        fieldFormatOk[i] = rep.getStepAttributeBoolean( id_step, i, "field_format_ok" );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "MonetDBBulkLoaderMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository" ), e );
    }
  }

  /**
   * Saves this step's metadata (database connection reference, both settings tabs and the
   * per-field mapping) to the repository. Mirrors the attribute names used by readRep.
   *
   * @param rep the repository to write to
   * @param metaStore the metastore (unused here)
   * @param id_transformation id of the owning transformation
   * @param id_step id of this step
   * @throws KettleException if any attribute cannot be written
   */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    try {
      rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta );

      // General Settings Tab
      rep.saveStepAttribute( id_transformation, id_step, "db_connection_name", dbConnectionName );
      rep.saveStepAttribute( id_transformation, id_step, "schema", schemaName );
      rep.saveStepAttribute( id_transformation, id_step, "table", tableName );
      rep.saveStepAttribute( id_transformation, id_step, "buffer_size", bufferSize );
      rep.saveStepAttribute( id_transformation, id_step, "log_file", logFile );
      rep.saveStepAttribute( id_transformation, id_step, "truncate", truncate );
      rep.saveStepAttribute( id_transformation, id_step, "fully_quote_sql", fullyQuoteSQL );

      // MonetDB Settings Tab
      rep.saveStepAttribute( id_transformation, id_step, "field_separator", fieldSeparator );
      rep.saveStepAttribute( id_transformation, id_step, "field_enclosure", fieldEnclosure );
      rep.saveStepAttribute( id_transformation, id_step, "null_representation", NULLrepresentation );
      rep.saveStepAttribute( id_transformation, id_step, "encoding", encoding );

      // Output Fields Tab
      for ( int i = 0; i < fieldTable.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "stream_name", fieldTable[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldStream[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_format_ok", fieldFormatOk[i] );
      }

      // Also, save the step-database relationship!
      if ( databaseMeta != null ) {
        rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "MonetDBBulkLoaderMeta.Exception.UnableToSaveStepInfoToRepository" ) + id_step, e );
    }
  }

  /**
   * No-op: this bulk loader does not change the layout of the row stream.
   */
  public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    // Default: nothing changes to rowMeta
  }

  /**
   * Validates the step configuration and appends the findings to {@code remarks}:
   * connection present, target table exists and contains all mapped columns, the
   * incoming stream supplies all mapped fields, and the step has input connections.
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    String error_message = "";

    if ( databaseMeta != null ) {
      Database db = new Database( loggingObject, databaseMeta );
      db.shareVariablesWith( transMeta );
      try {
        db.connect();

        if ( !Utils.isEmpty( tableName ) ) {
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
            PKG, "MonetDBBulkLoaderMeta.CheckResult.TableNameOK" ), stepMeta );
          remarks.add( cr );

          boolean first = true;
          boolean error_found = false;
          error_message = "";

          // Check fields in table
          String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            transMeta.environmentSubstitute( schemaName ), transMeta.environmentSubstitute( tableName ) );
          RowMetaInterface r = db.getTableFields( schemaTable );
          if ( r != null ) {
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "MonetDBBulkLoaderMeta.CheckResult.TableExists" ), stepMeta );
            remarks.add( cr );

            // How about the fields to insert/dateMask in the table?
            first = true;
            error_found = false;
            error_message = "";

            // Every mapped target column must exist in the table.
            for ( int i = 0; i < fieldTable.length; i++ ) {
              String field = fieldTable[i];

              ValueMetaInterface v = r.searchValueMeta( field );
              if ( v == null ) {
                if ( first ) {
                  first = false;
                  error_message += BaseMessages.getString(
                    PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsToLoadInTargetTable" ) + Const.CR;
                }
                error_found = true;
                error_message += "\t\t" + field + Const.CR;
              }
            }
            if ( error_found ) {
              cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
            } else {
              cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
                PKG, "MonetDBBulkLoaderMeta.CheckResult.AllFieldsFoundInTargetTable" ), stepMeta );
            }
            remarks.add( cr );
          } else {
            error_message = BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.CheckResult.CouldNotReadTableInfo" );
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
            remarks.add( cr );
          }
        }

        // Look up fields in the input stream <prev>
        if ( prev != null && prev.size() > 0 ) {
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
            PKG, "MonetDBBulkLoaderMeta.CheckResult.StepReceivingDatas", prev.size() + "" ), stepMeta );
          remarks.add( cr );

          boolean first = true;
          error_message = "";
          boolean error_found = false;

          // Every mapped stream field must be present in the incoming row.
          for ( int i = 0; i < fieldStream.length; i++ ) {
            ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
            if ( v == null ) {
              if ( first ) {
                first = false;
                error_message += BaseMessages.getString(
                  PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsInInput" ) + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + fieldStream[i] + Const.CR;
            }
          }
          if ( error_found ) {
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          } else {
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "MonetDBBulkLoaderMeta.CheckResult.AllFieldsFoundInInput" ), stepMeta );
          }
          remarks.add( cr );
        } else {
          error_message = BaseMessages.getString(
            PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsInInput3" ) + Const.CR;
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          remarks.add( cr );
        }
      } catch ( KettleException e ) {
        error_message = BaseMessages.getString(
          PKG, "MonetDBBulkLoaderMeta.CheckResult.DatabaseErrorOccurred" ) + e.getMessage();
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      } finally {
        db.disconnect();
      }
    } else {
      error_message = BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.CheckResult.InvalidConnection" );
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
      remarks.add( cr );
    }

    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
        PKG, "MonetDBBulkLoaderMeta.CheckResult.StepReceivingInfoFromOtherSteps" ), stepMeta );
      remarks.add( cr );
    } else {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
        PKG, "MonetDBBulkLoaderMeta.CheckResult.NoInputError" ), stepMeta );
      remarks.add( cr );
    }
  }

  /**
   * Builds the DDL statement for the target table by delegating to getSQLStatements
   * with a synthetic StepMeta for this step.
   *
   * @throws KettleException if the previous step's fields cannot be determined
   */
  public SQLStatement getTableDdl( TransMeta transMeta, String stepname, boolean autoSchema,
    MonetDBBulkLoaderData data, boolean safeMode ) throws KettleException {

    String name = stepname; // new name might not yet be linked to other steps!
StepMeta stepMeta = new StepMeta( BaseMessages.getString( PKG, "MonetDBBulkLoaderDialog.StepMeta.Title" ), name, this ); RowMetaInterface prev = transMeta.getPrevStepFields( stepname ); SQLStatement sql = getSQLStatements( transMeta, stepMeta, prev, autoSchema, data, safeMode ); return sql; } public RowMetaInterface updateFields( TransMeta transMeta, String stepname, MonetDBBulkLoaderData data ) throws KettleStepException { RowMetaInterface prev = transMeta.getPrevStepFields( stepname ); return updateFields( prev, data ); } public RowMetaInterface updateFields( RowMetaInterface prev, MonetDBBulkLoaderData data ) { // update the field table from the fields coming from the previous step RowMetaInterface tableFields = new RowMeta(); List<ValueMetaInterface> fields = prev.getValueMetaList(); fieldTable = new String[fields.size()]; fieldStream = new String[fields.size()]; fieldFormatOk = new boolean[fields.size()]; int idx = 0; for ( ValueMetaInterface field : fields ) { ValueMetaInterface tableField = field.clone(); tableFields.addValueMeta( tableField ); fieldTable[idx] = field.getName(); fieldStream[idx] = field.getName(); fieldFormatOk[idx] = true; idx++; } data.keynrs = new int[getFieldStream().length]; for ( int i = 0; i < data.keynrs.length; i++ ) { data.keynrs[i] = i; } return tableFields; } public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, boolean autoSchema, MonetDBBulkLoaderData data, boolean safeMode ) throws KettleStepException { SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do! 
if ( databaseMeta != null ) { if ( prev != null && prev.size() > 0 ) { // Copy the row RowMetaInterface tableFields; if ( autoSchema ) { tableFields = updateFields( prev, data ); } else { tableFields = new RowMeta(); // Now change the field names for ( int i = 0; i < fieldTable.length; i++ ) { ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] ); if ( v != null ) { ValueMetaInterface tableField = v.clone(); tableField.setName( fieldTable[i] ); tableFields.addValueMeta( tableField ); } } } if ( !Utils.isEmpty( tableName ) ) { Database db = new Database( loggingObject, databaseMeta ); db.shareVariablesWith( transMeta ); try { db.connect(); String schemaTable = databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ), transMeta.environmentSubstitute( tableName ) ); MonetDBDatabaseMeta.safeModeLocal.set( safeMode ); String cr_table = db.getDDL( schemaTable, tableFields, null, false, null, true ); String sql = cr_table; if ( sql.length() == 0 ) { retval.setSQL( null ); } else { retval.setSQL( sql ); } } catch ( KettleException e ) { retval.setError( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.GetSQL.ErrorOccurred" ) + e.getMessage() ); } finally { db.disconnect(); MonetDBDatabaseMeta.safeModeLocal.remove(); } } else { retval .setError( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) ); } } else { retval.setError( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) ); } } else { retval.setError( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.GetSQL.NoConnectionDefined" ) ); } return retval; } public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ) throws KettleStepException { if ( prev != null ) { /* DEBUG CHECK THIS */ // Insert dateMask fields : read/write for ( int i = 0; i < 
fieldTable.length; i++ ) { ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] ); DatabaseImpact ii = new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i], fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() ); impact.add( ii ); } } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ) { return new MonetDBBulkLoader( stepMeta, stepDataInterface, cnr, transMeta, trans ); } public StepDataInterface getStepData() { return new MonetDBBulkLoaderData(); } public DatabaseMeta[] getUsedDatabaseConnections() { if ( databaseMeta != null ) { return new DatabaseMeta[] { databaseMeta }; } else { return super.getUsedDatabaseConnections(); } } public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { String realTableName = space.environmentSubstitute( tableName ); String realSchemaName = space.environmentSubstitute( schemaName ); if ( databaseMeta != null ) { Database db = new Database( loggingObject, databaseMeta ); try { db.connect(); if ( !Utils.isEmpty( realTableName ) ) { String schemaTable = databaseMeta.getQuotedSchemaTableCombination( realSchemaName, realTableName ); // Check if this table exists... 
          if ( db.checkTableExists( schemaTable ) ) {
            return db.getTableFields( schemaTable );
          } else {
            throw new KettleException( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.Exception.TableNotFound" ) );
          }
        } else {
          throw new KettleException( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.Exception.TableNotSpecified" ) );
        }
      } catch ( Exception e ) {
        throw new KettleException( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.Exception.ErrorGettingFields" ), e );
      } finally {
        db.disconnect();
      }
    } else {
      throw new KettleException( BaseMessages.getString( PKG, "MonetDBBulkLoaderMeta.Exception.ConnectionNotDefined" ) );
    }
  }

  /**
   * @return the schemaName
   */
  public String getSchemaName() {
    return schemaName;
  }

  /**
   * @param schemaName the schemaName to set
   */
  public void setSchemaName( String schemaName ) {
    this.schemaName = schemaName;
  }

  /**
   * @return the path of the bulk-load log file
   */
  public String getLogFile() {
    return logFile;
  }

  /**
   * @param logFile the log file path to set
   */
  public void setLogFile( String logFile ) {
    this.logFile = logFile;
  }

  /**
   * @return the field separator used by the COPY INTO statement
   */
  public String getFieldSeparator() {
    return fieldSeparator;
  }

  /**
   * @param fieldSeparatorStr the field separator to set
   */
  public void setFieldSeparator( String fieldSeparatorStr ) {
    this.fieldSeparator = fieldSeparatorStr;
  }

  /**
   * @return the field enclosure character(s)
   */
  public String getFieldEnclosure() {
    return fieldEnclosure;
  }

  /**
   * @param fieldEnclosureStr the field enclosure to set
   */
  public void setFieldEnclosure( String fieldEnclosureStr ) {
    this.fieldEnclosure = fieldEnclosureStr;
  }

  /**
   * @return the literal used to represent NULL values in the load file
   */
  public String getNULLrepresentation() {
    return NULLrepresentation;
  }

  /**
   * @param NULLrepresentationStr the NULL representation to set
   */
  public void setNULLrepresentation( String NULLrepresentationStr ) {
    this.NULLrepresentation = NULLrepresentationStr;
  }

  /**
   * @return the character encoding of the load data
   */
  public String getEncoding() {
    return encoding;
  }

  /**
   * @param encoding the character encoding to set
   */
  public void setEncoding( String encoding ) {
    this.encoding = encoding;
  }

  /**
   * @return the bufferSize
   */
  public String getBufferSize() {
    return bufferSize;
  }

  /**
   * @param bufferSize the bufferSize to set
   */
  public void setBufferSize( String bufferSize ) {
    this.bufferSize = bufferSize;
  }

  /**
   * @return the fieldFormatOk
   */
  public boolean[] getFieldFormatOk() {
    return fieldFormatOk;
  }

  /**
   * @param fieldFormatOk the fieldFormatOk to set
   */
  public void setFieldFormatOk( boolean[] fieldFormatOk ) {
    this.fieldFormatOk = fieldFormatOk;
  }

  @Override
  public String getMissingDatabaseConnectionInformationMessage() {
    // TODO
    return null;
  }

  /**
   * @param database connection name to set
   */
  public void setDbConnectionName( String dbConnectionName ) {
    this.dbConnectionName = dbConnectionName;
  }

  /**
   * @return the database connection name
   */
  public String getDbConnectionName() {
    return this.dbConnectionName;
  }

  public StepMetaInjectionInterface getStepMetaInjectionInterface() {
    return this;
  }

  /**
   * Describe the metadata attributes that can be injected into this step metadata object.
   */
  public List<StepInjectionMetaEntry> getStepInjectionMetadataEntries() {
    return getStepInjectionMetadataEntries( PKG );
  }

  /**
   * Applies injected metadata entries: scalar entries map onto the top-level settings,
   * while the "MAPPINGS" detail set rebuilds the per-field arrays. Unknown keys throw.
   */
  public void injectStepMetadataEntries( List<StepInjectionMetaEntry> metadata ) {
    for ( StepInjectionMetaEntry entry : metadata ) {
      KettleAttributeInterface attr = findAttribute( entry.getKey() );

      // Set top level attributes...
      //
      if ( entry.getValueType() != ValueMetaInterface.TYPE_NONE ) {
        if ( entry.getKey().equals( "SCHEMA" ) ) {
          schemaName = (String) entry.getValue();
        } else if ( entry.getKey().equals( "TABLE" ) ) {
          tableName = (String) entry.getValue();
        } else if ( entry.getKey().equals( "LOGFILE" ) ) {
          logFile = (String) entry.getValue();
        } else if ( entry.getKey().equals( "FIELD_SEPARATOR" ) ) {
          fieldSeparator = (String) entry.getValue();
        } else if ( entry.getKey().equals( "FIELD_ENCLOSURE" ) ) {
          fieldEnclosure = (String) entry.getValue();
        } else if ( entry.getKey().equals( "NULL_REPRESENTATION" ) ) {
          setNULLrepresentation( (String) entry.getValue() );
        } else if ( entry.getKey().equals( "ENCODING" ) ) {
          encoding = (String) entry.getValue();
        } else if ( entry.getKey().equals( "BUFFER_SIZE" ) ) {
          bufferSize = (String) entry.getValue();
        } else if ( entry.getKey().equals( "TRUNCATE" ) ) {
          truncate = (Boolean) entry.getValue();
        } else if ( entry.getKey().equals( "FULLY_QUOTE_SQL" ) ) {
          fullyQuoteSQL = (Boolean) entry.getValue();
        } else {
          throw
            new RuntimeException( "Unhandled metadata injection of attribute: "
              + attr.toString() + " - " + attr.getDescription() );
        }
      } else {
        // The data sets...
        //
        if ( attr.getKey().equals( "MAPPINGS" ) ) {
          List<StepInjectionMetaEntry> selectMappings = entry.getDetails();

          // Resize the mapping arrays to the injected row count before filling them.
          fieldTable = new String[selectMappings.size()];
          fieldStream = new String[selectMappings.size()];
          fieldFormatOk = new boolean[selectMappings.size()];

          for ( int row = 0; row < selectMappings.size(); row++ ) {
            StepInjectionMetaEntry selectField = selectMappings.get( row );

            List<StepInjectionMetaEntry> fieldAttributes = selectField.getDetails();
            //CHECKSTYLE:Indentation:OFF
            for ( int i = 0; i < fieldAttributes.size(); i++ ) {
              StepInjectionMetaEntry fieldAttribute = fieldAttributes.get( i );
              KettleAttributeInterface fieldAttr = findAttribute( fieldAttribute.getKey() );

              Object attributeValue = fieldAttribute.getValue();
              if ( attributeValue == null ) {
                continue;
              }

              // NOTE(review): STREAMNAME fills fieldStream and FIELDNAME fills fieldTable,
              // which is the opposite of the repository attribute mapping in readRep/saveRep
              // ("stream_name" -> fieldTable). Verify against the injection entry definitions.
              if ( fieldAttr.getKey().equals( "STREAMNAME" ) ) {
                getFieldStream()[row] = (String) attributeValue;
              } else if ( fieldAttr.getKey().equals( "FIELDNAME" ) ) {
                getFieldTable()[row] = (String) attributeValue;
              } else if ( fieldAttr.getKey().equals( "FIELD_FORMAT_OK" ) ) {
                getFieldFormatOk()[row] = (Boolean) attributeValue;
              } else {
                throw new RuntimeException( "Unhandled metadata injection of attribute: "
                  + fieldAttr.toString() + " - " + fieldAttr.getDescription() );
              }
            }
          }
        }

        if ( !Utils.isEmpty( getFieldStream() ) ) {
          for ( int i = 0; i < getFieldStream().length; i++ ) {
            logDetailed( "row " + Integer.toString( i )
              + ": stream=" + getFieldStream()[i] + " : table=" + getFieldTable()[i] );
          }
        }
      }
    }
  }

  public List<StepInjectionMetaEntry> extractStepMetadataEntries() throws KettleException {
    return null;
  }

  /**
   * Returns the version of MonetDB that is used.
   *
   * @return The version of MonetDB
   * @throws KettleException
   *           if an error occurs
   */
  private MonetDbVersion getMonetDBVersion() throws KettleException {
    Database db = null;
    db = new Database( loggingObject, databaseMeta );
    try {
      db.connect();
      return new MonetDbVersion( db.getDatabaseMetaData().getDatabaseProductVersion() );
    } catch ( Exception e ) {
      throw new KettleException( e );
    } finally {
      if ( db != null ) {
        db.disconnect();
      }
    }
  }

  /**
   * Returns <code>true</code> if used the version of MonetBD Jan2014-SP2 or later, <code>false</code> otherwise.
   *
   * @return the compatibilityDbVersionMode
   */
  public boolean isCompatibilityDbVersionMode() {
    return compatibilityDbVersionMode;
  }

  /**
   * Defines and sets <code>true</code> if it is used the version of <i>MonetBD Jan2014-SP2</i> or later,
   * <code>false</code> otherwise. Sets also <code>false</code> if it's impossible to define which version of db is
   * used. Version lookup failures are logged at debug level and leave the flag unchanged.
   */
  public void setCompatibilityDbVersionMode() {

    MonetDbVersion monetDBVersion;
    try {
      monetDBVersion = getMonetDBVersion();
      this.compatibilityDbVersionMode =
        monetDBVersion.compareTo( MonetDbVersion.JAN_2014_SP2_DB_VERSION ) < 0 ? false : true;
      if ( isDebug() && this.compatibilityDbVersionMode ) {
        logDebug( BaseMessages.getString(
          PKG, "MonetDBVersion.Info.UsingCompatibilityMode", MonetDbVersion.JAN_2014_SP2_DB_VERSION ) );
      }
    } catch ( KettleException e ) {
      if ( isDebug() ) {
        logDebug( BaseMessages.getString(
          PKG, "MonetDBBulkLoaderMeta.Exception.ErrorOnGettingDbVersion", e.getMessage() ) );
      }
    }
  }
}
/* * Copyright 2017 OICR * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.dockstore.webservice.resources; import static io.dockstore.webservice.helpers.statelisteners.RSSListener.RSS_KEY; import static io.dockstore.webservice.helpers.statelisteners.SitemapListener.SITEMAP_KEY; import com.codahale.metrics.annotation.Timed; import com.google.common.io.Resources; import io.dockstore.common.DescriptorLanguage; import io.dockstore.common.PipHelper; import io.dockstore.common.Registry; import io.dockstore.common.SourceControl; import io.dockstore.webservice.CustomWebApplicationException; import io.dockstore.webservice.DockstoreWebserviceApplication; import io.dockstore.webservice.DockstoreWebserviceConfiguration; import io.dockstore.webservice.api.Config; import io.dockstore.webservice.core.Collection; import io.dockstore.webservice.core.Entry; import io.dockstore.webservice.core.Organization; import io.dockstore.webservice.core.Tool; import io.dockstore.webservice.core.Workflow; import io.dockstore.webservice.core.database.RSSToolPath; import io.dockstore.webservice.core.database.RSSWorkflowPath; import io.dockstore.webservice.helpers.MetadataResourceHelper; import io.dockstore.webservice.helpers.PublicStateManager; import io.dockstore.webservice.helpers.statelisteners.RSSListener; import io.dockstore.webservice.helpers.statelisteners.SitemapListener; import io.dockstore.webservice.jdbi.BioWorkflowDAO; import io.dockstore.webservice.jdbi.CollectionDAO; import 
io.dockstore.webservice.jdbi.OrganizationDAO; import io.dockstore.webservice.jdbi.ToolDAO; import io.dockstore.webservice.languages.LanguageHandlerFactory; import io.dockstore.webservice.resources.proposedGA4GH.ToolsApiExtendedServiceFactory; import io.dockstore.webservice.resources.proposedGA4GH.ToolsExtendedApiService; import io.dockstore.webservice.resources.rss.RSSEntry; import io.dockstore.webservice.resources.rss.RSSFeed; import io.dockstore.webservice.resources.rss.RSSHeader; import io.dockstore.webservice.resources.rss.RSSWriter; import io.dropwizard.hibernate.UnitOfWork; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.tags.Tag; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; import java.time.Year; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; import java.util.stream.Collectors; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import okhttp3.Cache; import org.apache.commons.io.IOUtils; import 
org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.hibernate.SessionFactory;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Unauthenticated "/metadata" resource: sitemap, RSS feed and various static lists
 * (source controls, registries, languages) plus service health/config endpoints.
 *
 * @author dyuen
 */
@Path("/metadata")
@Api("metadata")
@Produces({MediaType.TEXT_HTML, MediaType.TEXT_XML})
@Tag(name = "metadata", description = ResourceConstants.METADATA)
public class MetadataResource {
    public static final int RSS_ENTRY_LIMIT = 50;
    private static final Logger LOG = LoggerFactory.getLogger(MetadataResource.class);
    private final ToolsExtendedApiService delegate = ToolsApiExtendedServiceFactory.getToolsExtendedApi();

    private final ToolDAO toolDAO;
    private final OrganizationDAO organizationDAO;
    private final CollectionDAO collectionDAO;
    private final BioWorkflowDAO bioWorkflowDAO;
    private final DockstoreWebserviceConfiguration config;
    private final SitemapListener sitemapListener;
    private final RSSListener rssListener;

    public MetadataResource(SessionFactory sessionFactory, DockstoreWebserviceConfiguration config) {
        this.toolDAO = new ToolDAO(sessionFactory);
        this.organizationDAO = new OrganizationDAO(sessionFactory);
        this.collectionDAO = new CollectionDAO(sessionFactory);
        this.config = config;
        this.bioWorkflowDAO = new BioWorkflowDAO(sessionFactory);
        // Listeners are shared singletons; they hold the caches used by sitemap()/rssFeed().
        this.sitemapListener = PublicStateManager.getInstance().getSitemapListener();
        this.rssListener = PublicStateManager.getInstance().getRSSListener();
    }

    /**
     * Serves the sitemap as newline-separated URLs, backed by the sitemap listener's cache.
     */
    @GET
    @Timed
    @UnitOfWork(readOnly = true)
    @Path("sitemap")
    @Operation(summary = "List all available workflow, tool, organization, and collection paths.", description = "List all available workflow, tool, organization, and collection paths. Available means published for tools/workflows, and approved for organizations and their respective collections. NO authentication")
    @ApiOperation(value = "List all available workflow, tool, organization, and collection paths.", notes = "List all available workflow, tool, organization, and collection paths. Available means published for tools/workflows, and approved for organizations and their respective collections.")
    public String sitemap() {
        try {
            SortedSet<String> sitemap = sitemapListener.getCache().get(SITEMAP_KEY, (k) -> getSitemap());
            return String.join(System.lineSeparator(), sitemap);
        } catch (RuntimeException e) {
            throw new CustomWebApplicationException("Sitemap cache problems", HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Builds the full sitemap (tools, bio workflows, organizations and collections), sorted by TreeSet order.
     */
    public SortedSet<String> getSitemap() {
        SortedSet<String> urls = new TreeSet<>();
        urls.addAll(getToolPaths());
        urls.addAll(getBioWorkflowPaths());
        urls.addAll(getOrganizationAndCollectionPaths());
        return urls;
    }

    /**
     * Adds organization and collection URLs
     * //TODO needs to be more efficient via JPA query
     */
    private List<String> getOrganizationAndCollectionPaths() {
        List<String> urls = new ArrayList<>();
        List<Organization> organizations = organizationDAO.findAllApproved();
        organizations.forEach(organization -> {
            urls.add(createOrganizationURL(organization));
            List<Collection> collections = collectionDAO.findAllByOrg(organization.getId());
            collections.stream().map(collection -> createCollectionURL(collection, organization)).forEach(urls::add);
        });
        return urls;
    }

    // URLs for all published tools.
    private List<String> getToolPaths() {
        return toolDAO.findAllPublishedPaths().stream().map(toolPath -> createToolURL(toolPath.getTool())).collect(Collectors.toList());
    }

    // URLs for all published bio workflows.
    private List<String> getBioWorkflowPaths() {
        return bioWorkflowDAO.findAllPublishedPaths().stream().map(workflowPath -> createWorkflowURL(workflowPath.getBioWorkflow())).collect(
            Collectors.toList());
    }

    private String createOrganizationURL(Organization organization) {
        return MetadataResourceHelper.createOrganizationURL(organization);
    }

    private String createCollectionURL(Collection collection, Organization organization) {
        return
            MetadataResourceHelper.createCollectionURL(collection, organization);
    }

    private String createWorkflowURL(Workflow workflow) {
        return MetadataResourceHelper.createWorkflowURL(workflow);
    }

    private String createToolURL(Tool tool) {
        return MetadataResourceHelper.createToolURL(tool);
    }

    /**
     * Serves the RSS feed of published tools/workflows, backed by the RSS listener's cache.
     */
    @GET
    @Timed
    @UnitOfWork(readOnly = true)
    @Path("rss")
    @Produces(MediaType.TEXT_XML)
    @Operation(summary = "List all published tools and workflows in creation order", description = "List all published tools and workflows in creation order, NO authentication")
    @ApiOperation(value = "List all published tools and workflows in creation order.", notes = "NO authentication")
    public String rssFeed() {
        try {
            return rssListener.getCache().get(RSS_KEY, (k) -> getRSS());
        } catch (RuntimeException e) {
            throw new CustomWebApplicationException("RSS cache problems", HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Builds the RSS XML: all published tools and workflows sorted by last-updated time.
     * NOTE(review): RSS_ENTRY_LIMIT is declared on the class but not applied here — every
     * entry is emitted; confirm whether the limit should be enforced.
     */
    private String getRSS() {
        List<Tool> tools = toolDAO.findAllPublishedPathsOrderByDbupdatedate().stream().map(RSSToolPath::getTool).collect(Collectors.toList());
        List<Workflow> workflows = bioWorkflowDAO.findAllPublishedPathsOrderByDbupdatedate().stream().map(RSSWorkflowPath::getBioWorkflow).collect(
            Collectors.toList());
        List<Entry<?, ?>> dbEntries = new ArrayList<>();
        dbEntries.addAll(tools);
        dbEntries.addAll(workflows);
        dbEntries.sort(Comparator.comparingLong(entry -> entry.getLastUpdated().getTime()));

        // TODO: after seeing if this works, make this more efficient than just returning everything
        RSSFeed feed = new RSSFeed();

        RSSHeader header = new RSSHeader();
        header.setCopyright("Copyright " + Year.now().getValue() + " OICR");
        header.setTitle("Dockstore");
        header.setDescription("Dockstore, developed by the Cancer Genome Collaboratory, is an open platform used by the GA4GH for sharing Docker-based tools described with either the Common Workflow Language (CWL) or the Workflow Description Language (WDL).");
        header.setLanguage("en");
        header.setLink("https://dockstore.org/");
        header.setPubDate(RSSFeed.formatDate(Calendar.getInstance()));
        feed.setHeader(header);

        List<RSSEntry> entries = new ArrayList<>();
        for (Entry<?, ?> dbEntry : dbEntries) {
            RSSEntry entry = new RSSEntry();
            // The entry URL doubles as its GUID for both tools and workflows.
            if (dbEntry instanceof Workflow) {
                Workflow workflow = (Workflow)dbEntry;
                entry.setTitle(workflow.getWorkflowPath());
                String workflowURL = createWorkflowURL(workflow);
                entry.setGuid(workflowURL);
                entry.setLink(workflowURL);
            } else if (dbEntry instanceof Tool) {
                Tool tool = (Tool)dbEntry;
                entry.setTitle(tool.getPath());
                String toolURL = createToolURL(tool);
                entry.setGuid(toolURL);
                entry.setLink(toolURL);
            } else {
                throw new CustomWebApplicationException("Unknown data type unsupported for RSS feed.", HttpStatus.SC_INTERNAL_SERVER_ERROR);
            }
            final int arbitraryDescriptionLimit = 200;
            entry.setDescription(StringUtils.truncate(dbEntry.getDescription(), arbitraryDescriptionLimit));
            Calendar instance = Calendar.getInstance();
            instance.setTime(dbEntry.getLastUpdated());
            entry.setPubDate(RSSFeed.formatDate(instance));
            entries.add(entry);
        }
        feed.setEntries(entries);

        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            RSSWriter.write(feed, byteArrayOutputStream);
            return byteArrayOutputStream.toString(StandardCharsets.UTF_8.name());
        } catch (Exception e) {
            throw new CustomWebApplicationException("Could not write RSS feed.", HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Returns the pip requirements file matching the client version, either as plain text
     * or as a parsed name→version JSON map. Only the cwltool runner is supported.
     * NOTE(review): the OpenAPI schema advertises python_version default "2" while
     * {@code @DefaultValue} actually applies "3" — confirm which is intended.
     */
    @GET
    @Produces({ "text/plain", "application/json" })
    @Path("/runner_dependencies")
    @Operation(summary = "Returns the file containing runner dependencies", description = "Returns the file containing runner dependencies, NO authentication")
    @ApiResponse(description = "The requirements.txt file", content = @Content(
        mediaType = "application/json", schema = @Schema(implementation = String.class)))
    @ApiOperation(value = "Returns the file containing runner dependencies.", response = String.class)
    public Response getRunnerDependencies(
        @Parameter(name = "client_version", description = "The Dockstore client version") @ApiParam(value = "The Dockstore client version") @QueryParam("client_version") String clientVersion,
        @Parameter(name = "python_version", description = "Python version, only relevant for the cwltool runner", in = ParameterIn.QUERY, schema = @Schema(defaultValue = "2")) @ApiParam(value = "Python version, only relevant for the cwltool runner") @DefaultValue("3") @QueryParam("python_version") String pythonVersion,
        @Parameter(name = "runner", description = "The tool runner", in = ParameterIn.QUERY, schema = @Schema(defaultValue = "cwltool", allowableValues = {"cwltool"})) @ApiParam(value = "The tool runner", allowableValues = "cwltool") @DefaultValue("cwltool") @QueryParam("runner") String runner,
        @Parameter(name = "output", description = "Response type", in = ParameterIn.QUERY, schema = @Schema(defaultValue = "text", allowableValues = {"json", "text"})) @ApiParam(value = "Response type", allowableValues = "json, text") @DefaultValue("text") @QueryParam("output") String output,
        @Context ContainerRequestContext containerRequestContext) {
        if (!("cwltool").equals(runner)) {
            return Response.noContent().build();
        }
        boolean unwrap = !("json").equals(output);
        String fileVersion = PipHelper.convertSemVerToAvailableVersion(clientVersion);
        try {
            // Requirements files are bundled on the classpath per supported client version;
            // the "3" suffix selects the Python 3 variant.
            String content = Resources.toString(this.getClass().getClassLoader()
                .getResource("requirements/" + fileVersion + "/requirements" + (pythonVersion.startsWith("3") ? "3" : "") + ".txt"), StandardCharsets.UTF_8);

            Map<String, String> pipDepMap = PipHelper.convertPipRequirementsStringToMap(content);
            return Response.status(Response.Status.OK).type(unwrap ? MediaType.TEXT_PLAIN : MediaType.APPLICATION_JSON)
                .entity(unwrap ? content : pipDepMap).build();
        } catch (IOException e) {
            throw new CustomWebApplicationException("Could not retrieve runner dependencies file: " + e.getMessage(),
                HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Lists every supported source control as a bean.
     */
    @GET
    @Timed
    @Path("/sourceControlList")
    @Produces(MediaType.APPLICATION_JSON)
    @Operation(summary = "Get the list of source controls supported on Dockstore", description = "Get the list of source controls supported on Dockstore, NO authentication")
    @ApiResponse(description = "List of source control repositories", content = @Content(
        mediaType = "application/json", array = @ArraySchema(schema = @Schema(implementation = SourceControl.SourceControlBean.class))))
    @ApiOperation(value = "Get the list of source controls supported on Dockstore.", notes = "NO authentication", response = SourceControl.SourceControlBean.class, responseContainer = "List")
    public List<SourceControl.SourceControlBean> getSourceControlList() {
        List<SourceControl.SourceControlBean> sourceControlList = new ArrayList<>();
        Arrays.asList(SourceControl.values()).forEach(sourceControl -> sourceControlList.add(new SourceControl.SourceControlBean(sourceControl)));
        return sourceControlList;
    }

    /**
     * Lists every supported docker registry as a bean.
     */
    @GET
    @Timed
    @Path("/dockerRegistryList")
    @Produces(MediaType.APPLICATION_JSON)
    @Operation(summary = "Get the list of docker registries supported on Dockstore", description = "Get the list of docker registries supported on Dockstore, NO authentication")
    @ApiResponse(description = "List of Docker registries", content = @Content(
        mediaType = "application/json", array = @ArraySchema(schema = @Schema(implementation = Registry.RegistryBean.class))))
    @ApiOperation(nickname = "getDockerRegistries", value = "Get the list of docker registries supported on Dockstore.", notes = "NO authentication", response = Registry.RegistryBean.class, responseContainer = "List")
    public List<Registry.RegistryBean> getDockerRegistries() {
        List<Registry.RegistryBean> registryList = new ArrayList<>();
Arrays.asList(Registry.values()).forEach(registry -> registryList.add(new Registry.RegistryBean(registry))); return registryList; } @GET @Timed @Path("/descriptorLanguageList") @Produces(MediaType.APPLICATION_JSON) @Operation(summary = "Get the list of descriptor languages supported on Dockstore", description = "Get the list of descriptor languages supported on Dockstore, NO authentication") @ApiResponse(description = "List of descriptor languages", content = @Content( mediaType = "application/json", array = @ArraySchema(schema = @Schema(implementation = DescriptorLanguage.DescriptorLanguageBean.class)))) @ApiOperation(value = "Get the list of descriptor languages supported on Dockstore.", notes = "NO authentication", response = DescriptorLanguage.DescriptorLanguageBean.class, responseContainer = "List") public List<DescriptorLanguage.DescriptorLanguageBean> getDescriptorLanguages() { List<DescriptorLanguage.DescriptorLanguageBean> descriptorLanguageList = new ArrayList<>(); Arrays.stream(DescriptorLanguage.values()).filter(lang -> // only include plugin languages that have installed plugins !lang.isPluginLanguage() || LanguageHandlerFactory.getPluginMap().containsKey(lang)) .forEach(descriptorLanguage -> descriptorLanguageList.add(new DescriptorLanguage.DescriptorLanguageBean(descriptorLanguage))); return descriptorLanguageList; } @GET @Timed @Path("/okHttpCachePerformance") @Produces(MediaType.APPLICATION_JSON) @Operation(summary = "Get measures of cache performance", description = "Get measures of cache performance, NO authentication") @ApiResponse(description = "Cache performance information", content = @Content(mediaType = "application/json")) @ApiOperation(value = "Get measures of cache performance.", notes = "NO authentication", response = Map.class) public Map<String, String> getCachePerformance() { Cache cache = DockstoreWebserviceApplication.getCache(null); Map<String, String> results = new HashMap<>(); results.put("requestCount", 
String.valueOf(cache.requestCount())); results.put("networkCount", String.valueOf(cache.networkCount())); results.put("hitCount", String.valueOf(cache.hitCount())); results.put("maxSize", cache.maxSize() + " bytes"); try { results.put("size", cache.size() + " bytes"); } catch (IOException e) { /* do nothing if we cannot report size */ LOG.warn("unable to determine cache size, may not have initialized yet"); } return results; } @GET @Timed @UnitOfWork @Path("/elasticSearch") @Operation(summary = "Successful response if elastic search is up and running", description = "Successful response if elastic search is up and running, NO authentication") @ApiOperation(value = "Successful response if elastic search is up and running.", notes = "NO authentication") public Response checkElasticSearch() { Response elasticSearchResponse; try { elasticSearchResponse = delegate.toolsIndexSearch(null, null, null); String result = IOUtils.toString((InputStream)(elasticSearchResponse.getEntity()), StandardCharsets.UTF_8); JSONObject jsonObj = new JSONObject(result); JSONObject hitsHolder = jsonObj.getJSONObject("hits"); JSONArray hitsArray = hitsHolder.getJSONArray("hits"); if (hitsArray.toList().isEmpty()) { return Response.status(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).build(); } } catch (Exception ex) { return Response.status(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).build(); } return Response.ok().build(); } @GET @Path("/config.json") @Produces(MediaType.APPLICATION_JSON) @Operation(summary = "Configuration for UI clients of the API", description = "Configuration, NO authentication") @ApiOperation(value = "Configuration for UI clients of the API", notes = "NO authentication") public Config getConfig() { try { return Config.fromWebConfig(this.config); } catch (InvocationTargetException | IllegalAccessException e) { LOG.error("Error generating config response", e); throw new CustomWebApplicationException("Error retrieving config information", 
HttpStatus.SC_INTERNAL_SERVER_ERROR); } } }
package hudson.plugins.git;

import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.matrix.MatrixBuild;
import hudson.matrix.MatrixProject;
import hudson.model.FreeStyleBuild;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Node;
import hudson.plugins.git.extensions.GitSCMExtension;
import hudson.plugins.git.extensions.impl.EnforceGitClient;
import hudson.plugins.git.extensions.impl.DisableRemotePoll;
import hudson.plugins.git.extensions.impl.PathRestriction;
import hudson.plugins.git.extensions.impl.RelativeTargetDirectory;
import hudson.plugins.git.extensions.impl.SparseCheckoutPath;
import hudson.plugins.git.extensions.impl.SparseCheckoutPaths;
import hudson.plugins.git.extensions.impl.UserExclusion;
import hudson.remoting.VirtualChannel;
import hudson.slaves.EnvironmentVariablesNodeProperty;
import hudson.triggers.SCMTrigger;
import hudson.util.StreamTaskListener;
import java.io.File;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.util.Collections;
import java.util.List;
import jenkins.MasterToSlaveFileCallable;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.jenkinsci.plugins.gitclient.Git;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.jenkinsci.plugins.gitclient.JGitTool;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.jvnet.hudson.test.CaptureEnvironmentBuilder;
import org.jvnet.hudson.test.JenkinsRule;

import static org.junit.Assert.assertTrue;

/**
 * Base class for single repository git plugin tests.
 *
 * Provides a fresh {@link TestGitRepo} per test, convenience wrappers for
 * committing files, helpers to create {@link FreeStyleProject}s configured
 * with {@link GitSCM}, and build helpers that assert expected files exist
 * in the workspace after a build.
 *
 * @author Kohsuke Kawaguchi
 * @author ishaaq
 */
public abstract class AbstractGitTestCase {
    @Rule
    public JenkinsRule rule = new JenkinsRule();
    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();

    protected TaskListener listener;

    protected TestGitRepo testRepo;

    // aliases of testRepo properties
    protected PersonIdent johnDoe;
    protected PersonIdent janeDoe;
    protected File workDir; // aliases "gitDir"
    protected FilePath workspace; // aliases "gitDirPath"
    protected GitClient git;

    /** Creates a fresh test repository and caches its commonly-used properties. */
    @Before
    public void setUp() throws Exception {
        listener = StreamTaskListener.fromStderr();
        testRepo = new TestGitRepo("unnamed", tempFolder.newFolder(), listener);
        johnDoe = testRepo.johnDoe;
        janeDoe = testRepo.janeDoe;
        workDir = testRepo.gitDir;
        workspace = testRepo.gitDirPath;
        git = testRepo.git;
    }

    /** Commits {@code fileName} to the test repository as {@code committer}. */
    protected void commit(final String fileName, final PersonIdent committer, final String message)
            throws GitException, InterruptedException {
        testRepo.commit(fileName, committer, message);
    }

    /** Commits {@code fileName} with the given content as {@code committer}. */
    protected void commit(final String fileName, final String fileContent, final PersonIdent committer, final String message)
            throws GitException, InterruptedException {
        testRepo.commit(fileName, fileContent, committer, message);
    }

    /** Commits {@code fileName} with distinct author and committer identities. */
    protected void commit(final String fileName, final PersonIdent author, final PersonIdent committer,
                          final String message) throws GitException, InterruptedException {
        testRepo.commit(fileName, author, committer, message);
    }

    /** Returns remote configs pointing at the test repository. */
    protected List<UserRemoteConfig> createRemoteRepositories() throws IOException {
        return testRepo.remoteConfigs();
    }

    protected FreeStyleProject createFreeStyleProject() throws IOException {
        return rule.createFreeStyleProject();
    }

    protected FreeStyleProject setupProject(String branchString, boolean authorOrCommitter) throws Exception {
        return setupProject(branchString, authorOrCommitter, null);
    }

    protected FreeStyleProject setupProject(String branchString, boolean authorOrCommitter,
                                            String relativeTargetDir) throws Exception {
        return setupProject(branchString, authorOrCommitter, relativeTargetDir, null, null, null);
    }

    protected FreeStyleProject setupProject(String branchString, boolean authorOrCommitter,
                                            String relativeTargetDir,
                                            String excludedRegions,
                                            String excludedUsers,
                                            String includedRegions) throws Exception {
        return setupProject(branchString, authorOrCommitter, relativeTargetDir, excludedRegions, excludedUsers, null, false, includedRegions);
    }

    protected FreeStyleProject setupProject(String branchString, boolean authorOrCommitter,
                                            String relativeTargetDir,
                                            String excludedRegions,
                                            String excludedUsers,
                                            boolean fastRemotePoll,
                                            String includedRegions) throws Exception {
        return setupProject(branchString, authorOrCommitter, relativeTargetDir, excludedRegions, excludedUsers, null, fastRemotePoll, includedRegions);
    }

    protected FreeStyleProject setupProject(String branchString, boolean authorOrCommitter,
                                            String relativeTargetDir, String excludedRegions,
                                            String excludedUsers, String localBranch, boolean fastRemotePoll,
                                            String includedRegions) throws Exception {
        return setupProject(Collections.singletonList(new BranchSpec(branchString)),
                authorOrCommitter, relativeTargetDir, excludedRegions,
                excludedUsers, localBranch, fastRemotePoll, includedRegions);
    }

    protected FreeStyleProject setupProject(List<BranchSpec> branches, boolean authorOrCommitter,
                                            String relativeTargetDir, String excludedRegions,
                                            String excludedUsers, String localBranch, boolean fastRemotePoll,
                                            String includedRegions) throws Exception {
        return setupProject(branches, authorOrCommitter, relativeTargetDir, excludedRegions,
                excludedUsers, localBranch, fastRemotePoll, includedRegions, null);
    }

    protected FreeStyleProject setupProject(String branchString, List<SparseCheckoutPath> sparseCheckoutPaths) throws Exception {
        return setupProject(Collections.singletonList(new BranchSpec(branchString)),
                false, null, null, null, null, false, null, sparseCheckoutPaths);
    }

    /**
     * Creates a {@link FreeStyleProject} whose {@link GitSCM} is configured from the given options.
     *
     * NOTE(review): {@code authorOrCommitter}, {@code localBranch} and {@code fastRemotePoll} are
     * accepted for signature compatibility with the overloads above but are not applied here —
     * confirm whether callers rely on them taking effect.
     */
    protected FreeStyleProject setupProject(List<BranchSpec> branches, boolean authorOrCommitter,
                                            String relativeTargetDir, String excludedRegions,
                                            String excludedUsers, String localBranch, boolean fastRemotePoll,
                                            String includedRegions, List<SparseCheckoutPath> sparseCheckoutPaths) throws Exception {
        FreeStyleProject project = createFreeStyleProject();
        GitSCM scm = new GitSCM(
                createRemoteRepositories(),
                branches,
                false,
                Collections.<SubmoduleConfig>emptyList(),
                null, null,
                Collections.<GitSCMExtension>emptyList());
        scm.getExtensions().add(new DisableRemotePoll()); // don't work on a file:// repository
        if (relativeTargetDir != null) {
            scm.getExtensions().add(new RelativeTargetDirectory(relativeTargetDir));
        }
        if (excludedUsers != null) {
            scm.getExtensions().add(new UserExclusion(excludedUsers));
        }
        if (excludedRegions != null || includedRegions != null) {
            scm.getExtensions().add(new PathRestriction(includedRegions, excludedRegions));
        }
        // Added unconditionally, matching existing test expectations even when the list is null.
        scm.getExtensions().add(new SparseCheckoutPaths(sparseCheckoutPaths));
        project.setScm(scm);
        project.getBuildersList().add(new CaptureEnvironmentBuilder());
        return project;
    }

    /**
     * Creates a new project and configures the GitSCM according the parameters.
     *
     * @param repos remote repositories to clone from
     * @param branchSpecs branches to build
     * @param scmTriggerSpec cron spec for an {@link SCMTrigger}, or null for no trigger
     * @param disableRemotePoll disable workspace-less polling via "git ls-remote"
     * @param enforceGitClient extension forcing a specific git client implementation, or null
     * @return the configured, saved project
     * @throws Exception on project creation/configuration failure
     */
    protected FreeStyleProject setupProject(List<UserRemoteConfig> repos, List<BranchSpec> branchSpecs,
                                            String scmTriggerSpec, boolean disableRemotePoll,
                                            EnforceGitClient enforceGitClient) throws Exception {
        FreeStyleProject project = createFreeStyleProject();
        GitSCM scm = new GitSCM(
                repos,
                branchSpecs,
                false,
                Collections.<SubmoduleConfig>emptyList(),
                null, JGitTool.MAGIC_EXENAME,
                Collections.<GitSCMExtension>emptyList());
        if (disableRemotePoll) {
            scm.getExtensions().add(new DisableRemotePoll());
        }
        if (enforceGitClient != null) {
            scm.getExtensions().add(enforceGitClient);
        }
        project.setScm(scm);
        if (scmTriggerSpec != null) {
            SCMTrigger trigger = new SCMTrigger(scmTriggerSpec);
            project.addTrigger(trigger);
            trigger.start(project, true);
        }
        project.save();
        return project;
    }

    protected FreeStyleProject setupSimpleProject(String branchString) throws Exception {
        return setupProject(branchString, false);
    }

    /** Asserts that every expected file exists under the given workspace directory. */
    private void assertFilesExist(final FilePath base, final String... expectedNewlyCommittedFiles)
            throws IOException, InterruptedException {
        for (final String expectedNewlyCommittedFile : expectedNewlyCommittedFiles) {
            assertTrue(expectedNewlyCommittedFile + " file not found in workspace",
                    base.child(expectedNewlyCommittedFile).exists());
        }
    }

    /** Builds the project once and asserts the expected files and build result. */
    protected FreeStyleBuild build(final FreeStyleProject project, final Result expectedResult,
                                   final String... expectedNewlyCommittedFiles) throws Exception {
        final FreeStyleBuild build = project.scheduleBuild2(0).get();
        System.out.println(build.getLog());
        assertFilesExist(build.getWorkspace(), expectedNewlyCommittedFiles);
        if (expectedResult != null) {
            rule.assertBuildStatus(expectedResult, build);
        }
        return build;
    }

    /** Builds the project once and asserts the expected files under {@code parentDir}. */
    protected FreeStyleBuild build(final FreeStyleProject project, final String parentDir,
                                   final Result expectedResult,
                                   final String... expectedNewlyCommittedFiles) throws Exception {
        final FreeStyleBuild build = project.scheduleBuild2(0).get();
        System.out.println(build.getLog());
        assertFilesExist(build.getWorkspace().child(parentDir), expectedNewlyCommittedFiles);
        if (expectedResult != null) {
            rule.assertBuildStatus(expectedResult, build);
        }
        return build;
    }

    /** Matrix-project variant of {@link #build(FreeStyleProject, Result, String...)}. */
    protected MatrixBuild build(final MatrixProject project, final Result expectedResult,
                                final String... expectedNewlyCommittedFiles) throws Exception {
        final MatrixBuild build = project.scheduleBuild2(0).get();
        System.out.println(build.getLog());
        assertFilesExist(build.getWorkspace(), expectedNewlyCommittedFiles);
        if (expectedResult != null) {
            rule.assertBuildStatus(expectedResult, build);
        }
        return build;
    }

    /** Returns the environment captured by the project's {@link CaptureEnvironmentBuilder}, or empty vars. */
    protected EnvVars getEnvVars(FreeStyleProject project) {
        for (hudson.tasks.Builder b : project.getBuilders()) {
            if (b instanceof CaptureEnvironmentBuilder) {
                return ((CaptureEnvironmentBuilder) b).getEnvVars();
            }
        }
        return new EnvVars();
    }

    /** Replaces the node's environment-variable properties with the given entries. */
    protected void setVariables(Node node, EnvironmentVariablesNodeProperty.Entry... entries) throws IOException {
        node.getNodeProperties().replaceBy(
                Collections.singleton(new EnvironmentVariablesNodeProperty(entries)));
    }

    /** Resolves {@code refs/heads/<branch>} inside the build's workspace repository. */
    protected String getHeadRevision(AbstractBuild build, final String branch)
            throws IOException, InterruptedException {
        return build.getWorkspace().act(new MasterToSlaveFileCallable<String>() {
            public String invoke(File f, VirtualChannel channel) throws IOException, InterruptedException {
                try {
                    ObjectId oid = Git.with(null, null).in(f).getClient().getRepository()
                            .resolve("refs/heads/" + branch);
                    return oid.name();
                } catch (GitException e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /**
     * A utility method that displays a git repo. Useful to visualise merges.
     */
    public void showRepo(TestGitRepo repo, String msg) throws Exception {
        System.out.println("*********** " + msg + " ***********");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Best-effort display; the command's exit code is deliberately ignored.
        new Launcher.LocalLauncher(listener).launch()
                .cmds("git", "log", "--all", "--graph", "--decorate", "--oneline")
                .pwd(repo.gitDir.getCanonicalPath()).stdout(out).join();
        System.out.println(out.toString());
        out.close();
    }
}
/*
 * Licensed to DuraSpace under one or more contributor license agreements.
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * DuraSpace licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fcrepo.integration;

import static java.lang.Integer.MAX_VALUE;
import static java.lang.Integer.parseInt;
import static javax.ws.rs.core.HttpHeaders.ACCEPT;
import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE;
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.NO_CONTENT;
import static javax.ws.rs.core.Response.Status.OK;
import static org.fcrepo.http.commons.test.util.TestHelpers.parseTriples;
import static org.junit.Assert.assertEquals;
import static org.slf4j.LoggerFactory.getLogger;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.UUID;

import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.entity.BasicHttpEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.jena.query.Dataset;
import org.junit.Before;
import org.slf4j.Logger;

/**
 * Base class for ITs
 *
 * Provides a shared HttpClient, request factory helpers, and assertion
 * helpers for exercising the Fedora REST API over HTTP.
 *
 * @author awoods
 * @author escowles
 **/
public abstract class AbstractResourceIT {

    // Per-subclass logger, initialized before each test.
    protected Logger logger;

    public static Credentials FEDORA_ADMIN_CREDENTIALS =
        new UsernamePasswordCredentials("fedoraAdmin", "fedoraAdmin");

    @Before
    public void setLogger() {
        logger = getLogger(this.getClass());
    }

    // Port of the server under test; defaults to 8080 when the system property is unset.
    protected static final int SERVER_PORT = parseInt(System.getProperty(
            "fcrepo.dynamic.test.port", "8080"));

    // NOTE(review): assumed to end with '/' (serverAddress appends "rest/" directly) — confirm.
    private static final String CONTEXT_PATH = System
            .getProperty("fcrepo.test.context.path");

    protected static final String HOSTNAME = "localhost";

    protected static final String PROTOCOL = "http";

    // Base URL of the REST API, e.g. http://localhost:8080/<context>/rest/
    protected static final String serverAddress = PROTOCOL + "://" + HOSTNAME + ":" +
            SERVER_PORT + CONTEXT_PATH + "rest/";

    // Shared, unauthenticated client with effectively unbounded connection limits.
    protected static HttpClient client = createClient();

    protected static HttpClient createClient() {
        return HttpClientBuilder.create().setMaxConnPerRoute(MAX_VALUE)
                .setMaxConnTotal(MAX_VALUE).build();
    }

    /** Builds a POST to the given pid's URI. */
    protected static HttpPost postObjMethod(final String pid) {
        return new HttpPost(serverAddress + pid);
    }

    /** Builds a PUT to the given pid's URI. */
    protected static HttpPut putObjMethod(final String pid) {
        return new HttpPut(serverAddress + pid);
    }

    /** Builds a POST to the pid's URI with an optional query string. */
    protected static HttpPost postObjMethod(final String pid, final String query) {
        if (query.equals("")) {
            return new HttpPost(serverAddress + pid);
        }
        return new HttpPost(serverAddress + pid + "?" + query);
    }

    /** Builds a POST creating datastream content at pid/ds/fcr:content. */
    protected static HttpPost postDSMethod(final String pid, final String ds,
            final String content) throws UnsupportedEncodingException {
        final HttpPost post =
                new HttpPost(serverAddress + pid + "/" + ds + "/fcr:content");
        post.setEntity(new StringEntity(content));
        return post;
    }

    /** Builds a PUT replacing datastream content at pid/ds/fcr:content. */
    protected static HttpPut putDSMethod(final String pid, final String ds,
            final String content) throws UnsupportedEncodingException {
        final HttpPut put =
                new HttpPut(serverAddress + pid + "/" + ds + "/fcr:content");
        put.setEntity(new StringEntity(content));
        return put;
    }

    /** Executes a request with the shared (unauthenticated) client. */
    protected HttpResponse execute(final HttpUriRequest method)
            throws ClientProtocolException, IOException {
        logger.debug("Executing: " + method.getMethod() + " to " +
                method.getURI());
        return client.execute(method);
    }

    // Executes requests with preemptive basic authentication
    // NOTE(review): the response is returned after the try-with-resources closes the
    // client, so reading the response entity afterwards may fail; callers appear to
    // inspect status/headers only — confirm before relying on entity content.
    protected HttpResponse executeWithBasicAuth(final HttpUriRequest request,
            final String username, final String password) throws IOException {
        final HttpHost target = new HttpHost(HOSTNAME, SERVER_PORT, PROTOCOL);
        final CredentialsProvider credsProvider = new BasicCredentialsProvider();
        credsProvider.setCredentials(
                new AuthScope(target.getHostName(), target.getPort()),
                new UsernamePasswordCredentials(username, password));
        try (final CloseableHttpClient httpclient = HttpClients.custom()
                .setDefaultCredentialsProvider(credsProvider).build()) {
            // Pre-populate the auth cache so Basic credentials are sent preemptively.
            final AuthCache authCache = new BasicAuthCache();
            final BasicScheme basicAuth = new BasicScheme();
            authCache.put(target, basicAuth);
            final HttpClientContext localContext = HttpClientContext.create();
            localContext.setAuthCache(authCache);
            final CloseableHttpResponse response = httpclient.execute(request, localContext);
            return response;
        }
    }

    /** Executes the request and returns its status code, logging the body on non-2xx/3xx. */
    protected int getStatus(final HttpUriRequest method)
            throws ClientProtocolException, IOException {
        final HttpResponse response = execute(method);
        final int result = response.getStatusLine().getStatusCode();
        // i.e. result <= 199 or result >= 400: log the body to aid debugging.
        if (!(result > 199) || !(result < 400)) {
            logger.warn(EntityUtils.toString(response.getEntity()));
        }
        return result;
    }

    /** Executes the request, asserts 200, and returns the Content-Type header value. */
    protected String getContentType(final HttpUriRequest method)
            throws ClientProtocolException, IOException {
        final HttpResponse response = execute(method);
        final int result = response.getStatusLine().getStatusCode();
        assertEquals(OK.getStatusCode(), result);
        return response.getFirstHeader(CONTENT_TYPE).getValue();
    }

    /** Fetches RDF with the given client, defaulting Accept to n-triples, and parses it. */
    protected Dataset getDataset(final HttpClient client, final HttpUriRequest method)
            throws IOException {
        if (method.getFirstHeader(ACCEPT) == null) {
            method.addHeader(ACCEPT, "application/n-triples");
        } else {
            logger.debug("Retrieving RDF in mimeType: {}", method
                    .getFirstHeader(ACCEPT));
        }
        final HttpResponse response = client.execute(method);
        assertEquals(OK.getStatusCode(), response.getStatusLine()
                .getStatusCode());
        final Dataset result = parseTriples(response.getEntity());
        logger.trace("Retrieved RDF: {}", result);
        return result;
    }

    /** Asserts 200 and parses the response entity as RDF triples. */
    protected Dataset getDataset(final HttpResponse response) throws IOException {
        assertEquals(OK.getStatusCode(), response.getStatusLine().getStatusCode());
        final Dataset result = parseTriples(response.getEntity());
        logger.trace("Retrieved RDF: {}", result);
        return result;
    }

    protected Dataset getDataset(final HttpUriRequest method) throws IOException {
        return getDataset(client, method);
    }

    /** Creates an object at the repository root, using pid as a Slug hint when non-empty. */
    protected HttpResponse createObject(final String pid) throws IOException {
        final HttpPost httpPost = postObjMethod("/");
        if (pid.length() > 0) {
            httpPost.addHeader("Slug", pid);
        }
        final HttpResponse response = client.execute(httpPost);
        assertEquals(CREATED.getStatusCode(), response.getStatusLine().getStatusCode());
        return response;
    }

    /** Creates a datastream under pid with the given id and content, asserting 201. */
    protected HttpResponse createDatastream(final String pid, final String dsid,
            final String content) throws IOException {
        logger.trace(
                "Attempting to create datastream for object: {} at datastream ID: {}",
                pid, dsid);
        final HttpResponse response =
                client.execute(postDSMethod(pid, dsid, content));
        assertEquals(CREATED.getStatusCode(), response.getStatusLine().getStatusCode());
        return response;
    }

    protected HttpResponse setProperty(final String pid, final String propertyUri,
            final String value) throws IOException {
        return setProperty(pid, null, propertyUri, value);
    }

    /** Inserts a property via SPARQL-Update PATCH (optionally within transaction txId), asserting 204. */
    protected HttpResponse setProperty(final String pid, final String txId,
            final String propertyUri, final String value) throws IOException {
        final HttpPatch postProp = new HttpPatch(serverAddress +
                (txId != null ? txId + "/" : "") + pid);
        postProp.setHeader(CONTENT_TYPE, "application/sparql-update");
        final String updateString =
                "INSERT { <" + serverAddress + pid + "> <" + propertyUri + "> \"" + value + "\" } WHERE { }";
        postProp.setEntity(new StringEntity(updateString));
        final HttpResponse dcResp = execute(postProp);
        assertEquals(dcResp.getStatusLine().toString(), 204, dcResp.getStatusLine().getStatusCode());
        postProp.releaseConnection();
        return dcResp;
    }

    /** Adds an rdf:type mixin to pid via SPARQL-Update PATCH, asserting 204. */
    protected static void addMixin(final String pid, final String mixinUrl) throws IOException {
        final HttpPatch updateObjectGraphMethod =
                new HttpPatch(serverAddress + pid);
        updateObjectGraphMethod.addHeader(CONTENT_TYPE, "application/sparql-update");
        final BasicHttpEntity e = new BasicHttpEntity();
        e.setContent(new ByteArrayInputStream(
                ("INSERT DATA { <> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <" + mixinUrl + "> . } ")
                        .getBytes()));
        updateObjectGraphMethod.setEntity(e);
        final HttpResponse response = client.execute(updateObjectGraphMethod);
        assertEquals(NO_CONTENT.getStatusCode(), response.getStatusLine()
                .getStatusCode());
    }

    /**
     * Gets a random (but valid) pid for use in testing. This pid
     * is guaranteed to be unique within runs of this application.
     *
     * @return a random UUID
     */
    protected static String getRandomUniquePid() {
        return UUID.randomUUID().toString();
    }

    /**
     * Gets a random (but valid) property name for use in testing.
     *
     * @return a random property name
     */
    protected static String getRandomPropertyName() {
        return UUID.randomUUID().toString();
    }

    /**
     * Gets a random (but valid) property value for use in testing.
     *
     * @return a random property value
     */
    protected static String getRandomPropertyValue() {
        return UUID.randomUUID().toString();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.codec.net;

import java.io.ByteArrayOutputStream;
import java.io.UnsupportedEncodingException;
import java.util.BitSet;

import org.apache.commons.codec.BinaryDecoder;
import org.apache.commons.codec.BinaryEncoder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.StringDecoder;
import org.apache.commons.codec.StringEncoder;

/**
 * <p>
 * Implements the 'www-form-urlencoded' encoding scheme, also misleadingly known
 * as URL encoding.
 * </p>
 *
 * <p>
 * For more detailed information please refer to <a
 * href="http://www.w3.org/TR/html4/interact/forms.html#h-17.13.4.1"> Chapter
 * 17.13.4 'Form content types'</a> of the <a
 * href="http://www.w3.org/TR/html4/">HTML 4.01 Specification</a>
 * </p>
 *
 * <p>
 * This codec is meant to be a replacement for standard Java classes
 * {@link java.net.URLEncoder} and {@link java.net.URLDecoder} on older Java
 * platforms, as these classes in Java versions below 1.4 rely on the platform's
 * default charset encoding.
 * </p>
 *
 * @author Apache Software Foundation
 * @since 1.2
 * @version $Id: URLCodec.java 480406 2006-11-29 04:56:58 +0000 (Mi, 29 Nov
 *          2006) bayard $
 */
public class URLCodec implements BinaryEncoder, BinaryDecoder, StringEncoder, StringDecoder {

    /**
     * The default charset used for string decoding and encoding.
     */
    protected String charset = CharacterEncodingNames.UTF8;

    /** The escape character introducing a hex-encoded octet ('%'). */
    protected static byte ESCAPE_CHAR = '%';

    /**
     * BitSet of www-form-url safe characters.
     */
    protected static final BitSet WWW_FORM_URL = new BitSet(256);

    // Static initializer for www_form_url
    static {
        // alpha characters
        for (int i = 'a'; i <= 'z'; i++) {
            WWW_FORM_URL.set(i);
        }
        for (int i = 'A'; i <= 'Z'; i++) {
            WWW_FORM_URL.set(i);
        }
        // numeric characters
        for (int i = '0'; i <= '9'; i++) {
            WWW_FORM_URL.set(i);
        }
        // special chars
        WWW_FORM_URL.set('-');
        WWW_FORM_URL.set('_');
        WWW_FORM_URL.set('.');
        WWW_FORM_URL.set('*');
        // blank to be replaced with +
        WWW_FORM_URL.set(' ');
    }

    /**
     * Default constructor.
     */
    public URLCodec() {
        super();
    }

    /**
     * Constructor which allows for the selection of a default charset
     *
     * @param charset
     *            the default string charset to use.
     */
    public URLCodec(String charset) {
        super();
        this.charset = charset;
    }

    /**
     * Encodes an array of bytes into an array of URL safe 7-bit characters.
     * Unsafe characters are escaped.
     *
     * @param bytes
     *            array of bytes to convert to URL safe characters
     * @return array of bytes containing URL safe characters
     */
    public byte[] encode(byte[] bytes) {
        return encodeUrl(WWW_FORM_URL, bytes);
    }

    /**
     * Decodes an array of URL safe 7-bit characters into an array of original
     * bytes. Escaped characters are converted back to their original
     * representation.
     *
     * @param bytes
     *            array of URL safe characters
     * @return array of original bytes
     * @throws DecoderException
     *             Thrown if URL decoding is unsuccessful
     */
    public byte[] decode(byte[] bytes) throws DecoderException {
        return decodeUrl(bytes);
    }

    /**
     * The <code>String</code> encoding used for decoding and encoding.
     *
     * @return Returns the encoding.
     *
     * @deprecated use #getDefaultCharset()
     */
    public String getEncoding() {
        return this.charset;
    }

    /**
     * The default charset used for string decoding and encoding.
     *
     * @return the default string charset.
     */
    public String getDefaultCharset() {
        return this.charset;
    }

    /**
     * Encodes an array of bytes into an array of URL safe 7-bit characters.
     * Unsafe characters are escaped as <code>%XX</code> (uppercase hex);
     * a space maps to <code>'+'</code>.
     *
     * @param urlsafe
     *            bitset of characters deemed URL safe; <code>null</code>
     *            falls back to {@link #WWW_FORM_URL}
     * @param bytes
     *            array of bytes to convert to URL safe characters;
     *            <code>null</code> yields <code>null</code>
     * @return array of bytes containing URL safe characters
     */
    public static final byte[] encodeUrl(BitSet urlsafe, byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        if (urlsafe == null) {
            urlsafe = WWW_FORM_URL;
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        for (int i = 0; i < bytes.length; i++) {
            int b = bytes[i];
            if (b < 0) {
                // Normalize a signed byte into the 0..255 range expected by the BitSet.
                b = 256 + b;
            }
            if (urlsafe.get(b)) {
                if (b == ' ') {
                    b = '+';
                }
                buffer.write(b);
            } else {
                // Consistency fix: use the declared ESCAPE_CHAR instead of a hard-coded '%'.
                buffer.write(ESCAPE_CHAR);
                char hex1 = Character.toUpperCase(Character.forDigit(
                        (b >> 4) & 0xF, 16));
                char hex2 = Character.toUpperCase(Character.forDigit(b & 0xF, 16));
                buffer.write(hex1);
                buffer.write(hex2);
            }
        }
        return buffer.toByteArray();
    }

    /**
     * Decodes an array of URL safe 7-bit characters into an array of original
     * bytes. Escaped characters are converted back to their original
     * representation; <code>'+'</code> maps back to a space.
     *
     * @param bytes
     *            array of URL safe characters; <code>null</code> yields
     *            <code>null</code>
     * @return array of original bytes
     * @throws DecoderException
     *             Thrown if URL decoding is unsuccessful, i.e. on a truncated
     *             escape sequence or a non-hex escape digit
     */
    public static final byte[] decodeUrl(byte[] bytes) throws DecoderException {
        if (bytes == null) {
            return null;
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        for (int i = 0; i < bytes.length; i++) {
            int b = bytes[i];
            if (b == '+') {
                buffer.write(' ');
            } else if (b == ESCAPE_CHAR) {
                // Explicit bounds check replaces the previous
                // catch(ArrayIndexOutOfBoundsException) control flow; callers
                // still see the same DecoderException on a truncated escape.
                if (i + 2 >= bytes.length) {
                    throw new DecoderException("Invalid URL encoding");
                }
                int u = Character.digit((char) bytes[++i], 16);
                int l = Character.digit((char) bytes[++i], 16);
                if (u == -1 || l == -1) {
                    throw new DecoderException("Invalid URL encoding");
                }
                buffer.write((char) ((u << 4) + l));
            } else {
                buffer.write(b);
            }
        }
        return buffer.toByteArray();
    }

    /**
     * Encodes a string into its URL safe form using the specified string
     * charset. Unsafe characters are escaped.
     *
     * @param pString
     *            string to convert to a URL safe form
     * @param charset
     *            the charset for pString
     * @return URL safe string
     * @throws UnsupportedEncodingException
     *             Thrown if charset is not supported
     */
    public String encode(String pString, String charset)
            throws UnsupportedEncodingException {
        if (pString == null) {
            return null;
        }
        // The encoded output is pure 7-bit, so US-ASCII is always sufficient here.
        return new String(encode(pString.getBytes(charset)),
                CharacterEncodingNames.US_ASCII);
    }

    /**
     * Encodes a string into its URL safe form using the default string charset.
     * Unsafe characters are escaped.
     *
     * @param pString
     *            string to convert to a URL safe form
     * @return URL safe string
     * @throws EncoderException
     *             Thrown if URL encoding is unsuccessful
     *
     * @see #getDefaultCharset()
     */
    public String encode(String pString) throws EncoderException {
        if (pString == null) {
            return null;
        }
        try {
            return encode(pString, getDefaultCharset());
        } catch (UnsupportedEncodingException e) {
            throw new EncoderException(e.getMessage());
        }
    }

    /**
     * Decodes a URL safe string into its original form using the specified
     * encoding. Escaped characters are converted back to their original
     * representation.
     *
     * @param pString
     *            URL safe string to convert into its original form
     * @param charset
     *            the original string charset
     * @return original string
     * @throws DecoderException
     *             Thrown if URL decoding is unsuccessful
     * @throws UnsupportedEncodingException
     *             Thrown if charset is not supported
     */
    public String decode(String pString, String charset)
            throws DecoderException, UnsupportedEncodingException {
        if (pString == null) {
            return null;
        }
        return new String(decode(pString
                .getBytes(CharacterEncodingNames.US_ASCII)), charset);
    }

    /**
     * Decodes a URL safe string into its original form using the default string
     * charset. Escaped characters are converted back to their original
     * representation.
     *
     * @param pString
     *            URL safe string to convert into its original form
     * @return original string
     * @throws DecoderException
     *             Thrown if URL decoding is unsuccessful
     *
     * @see #getDefaultCharset()
     */
    public String decode(String pString) throws DecoderException {
        if (pString == null) {
            return null;
        }
        try {
            return decode(pString, getDefaultCharset());
        } catch (UnsupportedEncodingException e) {
            throw new DecoderException(e.getMessage());
        }
    }

    /**
     * Encodes an object into its URL safe form. Unsafe characters are escaped.
     *
     * @param pObject
     *            string to convert to a URL safe form
     * @return URL safe object
     * @throws EncoderException
     *             Thrown if URL encoding is not applicable to objects of this
     *             type or if encoding is unsuccessful
     */
    public Object encode(Object pObject) throws EncoderException {
        if (pObject == null) {
            return null;
        } else if (pObject instanceof byte[]) {
            return encode((byte[]) pObject);
        } else if (pObject instanceof String) {
            return encode((String) pObject);
        } else {
            throw new EncoderException("Objects of type " +
                    pObject.getClass().getName() + " cannot be URL encoded");
        }
    }

    /**
     * Decodes a URL safe object into its original form. Escaped characters are
     * converted back to their original representation.
     *
     * @param pObject
     *            URL safe object to convert into its original form
     * @return original object
     * @throws DecoderException
     *             Thrown if the argument is not a <code>String</code> or
     *             <code>byte[]</code>. Thrown if a failure condition is
     *             encountered during the decode process.
     */
    public Object decode(Object pObject) throws DecoderException {
        if (pObject == null) {
            return null;
        } else if (pObject instanceof byte[]) {
            return decode((byte[]) pObject);
        } else if (pObject instanceof String) {
            return decode((String) pObject);
        } else {
            throw new DecoderException("Objects of type " +
                    pObject.getClass().getName() + " cannot be URL decoded");
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.mundosica.coffx.utility;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import static com.mundosica.coffx.utility.Util.*;

import java.util.Collection;
import java.util.Map;

import org.junit.Ignore;

/**
 * Unit tests for {@link Util}.
 *
 * <p>Note on the prototype tests below: in JUnit 4 a method annotated with
 * {@code @Ignore} but without {@code @Test} is invisible to the runner —
 * it is neither executed nor reported. Each prototype therefore carries
 * both annotations so it shows up as "skipped" instead of silently
 * disappearing from the test report.</p>
 *
 * @author Inspiron I5558
 */
public class UtilTest {

    public UtilTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of under_score2 method, of class Util.
     */
    @Test
    public void testUnder_scores() {
        String str = "HolaMundoEsteEsUnEjemploDelUnderScore";
        String expResult = "hola_mundo_este_es_un_ejemplo_del_under_score";
        String result = under_score2(str);
        assertEquals(expResult, result);
    }

    /**
     * Test of empty method, of class Util.
     */
    @Test
    public void testEmpty_ObjectArr() {
    }

    /**
     * Test of empty method, of class Util.
     */
    @Test
    public void testEmpty_Collection() {
    }

    /**
     * Test of empty method, of class Util.
     */
    @Test
    public void testEmpty_String() {
    }

    /**
     * Test of join method, of class Util.
     */
    @Test
    public void testJoin_StringArr_String() {
    }

    /**
     * Test of join method, of class Util.
     */
    @Test
    public void testJoin_Collection_String() {
    }

    /**
     * Test of join method, of class Util.
     */
    @Test
    public void testJoin_Map_String() {
    }

    /**
     * Test of repeat method, of class Util.
     */
    @Test
    public void testRepeat() {
    }

    /**
     * Test of repeatInArray method, of class Util.
     */
    @Test
    public void testRepeatInArray() {
    }

    /**
     * Test of joinAndRepeat method, of class Util.
     */
    @Ignore
    @Test
    public void testJoinAndRepeat() {
        System.out.println("joinAndRepeat");
        String str = "";
        String delimiter = "";
        int count = 0;
        String expResult = "";
        String result = Util.joinAndRepeat(str, delimiter, count);
        assertEquals(expResult, result);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of args method, of class Util.
     */
    @Ignore
    @Test
    public void testArgs() {
        System.out.println("args");
        String[] input = null;
        Map<String, String> expResult = null;
        Map<String, String> result = Util.args(input);
        assertEquals(expResult, result);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of echo method, of class Util.
     */
    @Ignore
    @Test
    public void testEcho_Map() {
        System.out.println("echo");
        Map map = null;
        Util.echo(map);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of echo method, of class Util.
     */
    @Ignore
    @Test
    public void testEcho_Collection() {
        System.out.println("echo");
        Collection collection = null;
        Util.echo(collection);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of echo method, of class Util.
     */
    @Ignore
    @Test
    public void testEcho_String() {
        System.out.println("echo");
        String str = "";
        Util.echo(str);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of echo method, of class Util.
     */
    @Ignore
    @Test
    public void testEcho_Object() {
        System.out.println("echo");
        Object o = null;
        Util.echo(o);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of camel method, of class Util.
     */
    @Test
    public void testCamel() {
        System.out.println("camel");
        String expResult = "HolaMundo";
        String result = Util.camel("hola mundo");
        assertEquals(expResult, result);
    }

    /**
     * Test of under_score method, of class Util.
     */
    @Ignore
    @Test
    public void testUnder_score() {
        System.out.println("under_score");
        String str = "";
        String expResult = "";
        String result = Util.under_score(str);
        assertEquals(expResult, result);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of under_score2 method, of class Util.
     */
    @Ignore
    @Test
    public void testUnder_score2() {
        System.out.println("under_score2");
        String str = "";
        String expResult = "";
        String result = Util.under_score2(str);
        assertEquals(expResult, result);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    /**
     * Test of main method, of class Util.
     */
    @Ignore
    @Test
    public void testMain() {
        System.out.println("main");
        String[] arg = null;
        Util.main(arg);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.engine; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.camel.Exchange; import org.apache.camel.ExtendedExchange; import org.apache.camel.MessageHistory; import org.apache.camel.spi.InflightRepository; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.service.ServiceSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Default {@link org.apache.camel.spi.InflightRepository}. 
*/ public class DefaultInflightRepository extends ServiceSupport implements InflightRepository { private static final Logger LOG = LoggerFactory.getLogger(DefaultInflightRepository.class); private final AtomicInteger size = new AtomicInteger(); private final ConcurrentMap<String, Exchange> inflight = new ConcurrentHashMap<>(); private final ConcurrentMap<String, AtomicInteger> routeCount = new ConcurrentHashMap<>(); private boolean inflightExchangeEnabled; @Override public void add(Exchange exchange) { size.incrementAndGet(); if (inflightExchangeEnabled) { inflight.put(exchange.getExchangeId(), exchange); } } @Override public void remove(Exchange exchange) { size.decrementAndGet(); if (inflightExchangeEnabled) { inflight.remove(exchange.getExchangeId()); } } @Override public void add(Exchange exchange, String routeId) { AtomicInteger existing = routeCount.get(routeId); if (existing != null) { existing.incrementAndGet(); } } @Override public void remove(Exchange exchange, String routeId) { AtomicInteger existing = routeCount.get(routeId); if (existing != null) { existing.decrementAndGet(); } } @Override public int size() { return size.get(); } @Override public void addRoute(String routeId) { routeCount.putIfAbsent(routeId, new AtomicInteger()); } @Override public void removeRoute(String routeId) { routeCount.remove(routeId); } @Override public int size(String routeId) { AtomicInteger existing = routeCount.get(routeId); return existing != null ? 
existing.get() : 0; } @Override public boolean isInflightBrowseEnabled() { return inflightExchangeEnabled; } @Override public void setInflightBrowseEnabled(boolean inflightBrowseEnabled) { this.inflightExchangeEnabled = inflightBrowseEnabled; } @Override public Collection<InflightExchange> browse() { return browse(null, -1, false); } @Override public Collection<InflightExchange> browse(String fromRouteId) { return browse(fromRouteId, -1, false); } @Override public Collection<InflightExchange> browse(int limit, boolean sortByLongestDuration) { return browse(null, limit, sortByLongestDuration); } @Override public Collection<InflightExchange> browse(String fromRouteId, int limit, boolean sortByLongestDuration) { if (!inflightExchangeEnabled) { return Collections.emptyList(); } Stream<Exchange> values; if (fromRouteId == null) { // all values values = inflight.values().stream(); } else { // only if route match values = inflight.values().stream() .filter(e -> fromRouteId.equals(e.getFromRouteId())); } if (sortByLongestDuration) { // sort by duration and grab the first values = values.sorted((e1, e2) -> { long d1 = getExchangeDuration(e1); long d2 = getExchangeDuration(e2); // need the biggest number first return -1 * Long.compare(d1, d2); }); } else { // else sort by exchange id values = values.sorted(Comparator.comparing(Exchange::getExchangeId)); } if (limit > 0) { values = values.limit(limit); } List<InflightExchange> answer = values.map(InflightExchangeEntry::new).collect(Collectors.toList()); return Collections.unmodifiableCollection(answer); } @Override public InflightExchange oldest(String fromRouteId) { if (!inflightExchangeEnabled) { return null; } Stream<Exchange> values; if (fromRouteId == null) { // all values values = inflight.values().stream(); } else { // only if route match values = inflight.values().stream() .filter(e -> fromRouteId.equals(e.getFromRouteId())); } // sort by duration and grab the first Exchange first = values.sorted((e1, e2) -> { long d1 
= getExchangeDuration(e1); long d2 = getExchangeDuration(e2); // need the biggest number first return -1 * Long.compare(d1, d2); }).findFirst().orElse(null); if (first != null) { return new InflightExchangeEntry(first); } else { return null; } } @Override protected void doStart() throws Exception { } @Override protected void doStop() throws Exception { int count = size(); if (count > 0) { LOG.warn("Shutting down while there are still {} inflight exchanges.", count); } else { LOG.debug("Shutting down with no inflight exchanges."); } routeCount.clear(); } private static long getExchangeDuration(Exchange exchange) { return System.currentTimeMillis() - exchange.getCreated(); } private static final class InflightExchangeEntry implements InflightExchange { private final Exchange exchange; private InflightExchangeEntry(Exchange exchange) { this.exchange = exchange; } @Override public Exchange getExchange() { return exchange; } @Override public long getDuration() { return DefaultInflightRepository.getExchangeDuration(exchange); } @Override @SuppressWarnings("unchecked") public long getElapsed() { // this can only be calculate if message history is enabled LinkedList<MessageHistory> list = exchange.getProperty(Exchange.MESSAGE_HISTORY, LinkedList.class); if (list == null || list.isEmpty()) { return 0; } // get latest entry MessageHistory history = list.getLast(); if (history != null) { long elapsed = history.getElapsed(); if (elapsed == 0 && history.getTime() > 0) { // still in progress, so lets compute it via the start time elapsed = System.currentTimeMillis() - history.getTime(); } return elapsed; } else { return 0; } } @Override @SuppressWarnings("unchecked") public String getNodeId() { return exchange.adapt(ExtendedExchange.class).getHistoryNodeId(); } @Override public String getFromRouteId() { return exchange.getFromRouteId(); } @Override @SuppressWarnings("unchecked") public String getAtRouteId() { return ExchangeHelper.getAtRouteId(exchange); } @Override public 
String toString() { return "InflightExchangeEntry[exchangeId=" + exchange.getExchangeId() + "]"; } } }
// CHECKSTYLE IGNORE Javadoc
/*
 *
 *  Copyright 2012 Netflix, Inc.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package com.netflix.simianarmy;

import java.util.Calendar;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.jclouds.compute.ComputeService;
import org.jclouds.domain.LoginCredentials;
import org.jclouds.ssh.SshClient;
import org.testng.Assert;

import com.netflix.simianarmy.MonkeyRecorder.Event;
import com.netflix.simianarmy.basic.BasicConfiguration;
import com.netflix.simianarmy.basic.BasicRecorderEvent;

// Test double for Monkey.Context: provides in-memory / no-op implementations of the
// context collaborators so monkeys can be driven in tests without touching real cloud
// infrastructure. Collaborator behavior that a test should never hit throws
// UnsupportedOperationException so accidental use fails loudly.
public class TestMonkeyContext implements Monkey.Context {
    // the monkey type under test; used to verify scheduler start/stop receive the right monkey
    private final MonkeyType monkeyType;
    // events passed to reportEvent(), accumulated for assertion via getEventReport()
    private final LinkedList<Event> eventReport = new LinkedList<Event>();

    public TestMonkeyContext(MonkeyType monkeyType) {
        this.monkeyType = monkeyType;
    }

    @Override
    public MonkeyConfiguration configuration() {
        // empty properties -> configuration defaults throughout
        return new BasicConfiguration(new Properties());
    }

    @Override
    public MonkeyScheduler scheduler() {
        // synchronous scheduler: start() asserts the monkey identity and runs it
        // immediately on the calling thread
        return new MonkeyScheduler() {
            @Override
            public int frequency() {
                return 1;
            }

            @Override
            public TimeUnit frequencyUnit() {
                return TimeUnit.HOURS;
            }

            @Override
            public void start(Monkey monkey, Runnable run) {
                Assert.assertEquals(monkey.type().name(), monkeyType.name(), "starting monkey");
                run.run();
            }

            @Override
            public void stop(Monkey monkey) {
                Assert.assertEquals(monkey.type().name(), monkeyType.name(), "stopping monkey");
            }
        };
    }

    @Override
    public MonkeyCalendar calendar() {
        // CHECKSTYLE IGNORE MagicNumberCheck
        // calendar where it is always "monkey time"; business-day math is unsupported
        return new MonkeyCalendar() {
            @Override
            public boolean isMonkeyTime(Monkey monkey) {
                return true;
            }

            @Override
            public int openHour() {
                return 10;
            }

            @Override
            public int closeHour() {
                return 11;
            }

            @Override
            public Calendar now() {
                return Calendar.getInstance();
            }

            @Override
            public Date getBusinessDay(Date date, int n) {
                throw new RuntimeException("Not implemented.");
            }
        };
    }

    @Override
    public CloudClient cloudClient() {
        // destructive operations are accepted and silently ignored; query/SSH style
        // operations are unsupported and throw
        return new CloudClient() {
            @Override
            public void terminateInstance(String instanceId) {
            }

            @Override
            public void createTagsForResources(Map<String, String> keyValueMap, String... resourceIds) {
            }

            @Override
            public void deleteAutoScalingGroup(String asgName) {
            }

            @Override
            public void deleteVolume(String volumeId) {
            }

            @Override
            public void deleteSnapshot(String snapshotId) {
            }

            @Override
            public void deleteImage(String imageId) {
            }

            @Override
            public void deleteLaunchConfiguration(String launchConfigName) {
            }

            @Override
            public List<String> listAttachedVolumes(String instanceId, boolean includeRoot) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void detachVolume(String instanceId, String volumeId, boolean force) {
                throw new UnsupportedOperationException();
            }

            @Override
            public ComputeService getJcloudsComputeService() {
                throw new UnsupportedOperationException();
            }

            @Override
            public String getJcloudsId(String instanceId) {
                throw new UnsupportedOperationException();
            }

            @Override
            public SshClient connectSsh(String instanceId, LoginCredentials credentials) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String findSecurityGroup(String instanceId, String groupName) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String createSecurityGroup(String instanceId, String groupName, String description) {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean canChangeInstanceSecurityGroups(String instanceId) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void setInstanceSecurityGroups(String instanceId, List<String> groupIds) {
                throw new UnsupportedOperationException();
            }
        };
    }

    // in-memory recorder: events are held in a list; the unfiltered findEvents
    // variants return every recorded event
    private final MonkeyRecorder recorder = new MonkeyRecorder() {
        private final List<Event> events = new LinkedList<Event>();

        @Override
        public Event newEvent(MonkeyType mkType, EventType eventType, String region, String id) {
            return new BasicRecorderEvent(mkType, eventType, region, id);
        }

        @Override
        public void recordEvent(Event evt) {
            events.add(evt);
        }

        @Override
        public List<Event> findEvents(Map<String, String> query, Date after) {
            return events;
        }

        @Override
        public List<Event> findEvents(MonkeyType mkeyType, Map<String, String> query, Date after) {
            // used from BasicScheduler
            return events;
        }

        @Override
        public List<Event> findEvents(MonkeyType mkeyType, EventType eventType, Map<String, String> query, Date after) {
            // used from ChaosMonkey
            // NOTE(review): assumes query always contains a "groupName" key — a
            // missing key would NPE here; confirm against callers
            List<Event> evts = new LinkedList<Event>();
            for (Event evt : events) {
                if (query.get("groupName").equals(evt.field("groupName")) && evt.monkeyType() == mkeyType
                        && evt.eventType() == eventType && evt.eventTime().after(after)) {
                    evts.add(evt);
                }
            }
            return evts;
        }
    };

    @Override
    public MonkeyRecorder recorder() {
        return recorder;
    }

    @Override
    public void reportEvent(Event evt) {
        eventReport.add(evt);
    }

    @Override
    public void resetEventReport() {
        eventReport.clear();
    }

    @Override
    public String getEventReport() {
        // "TYPE id" pairs concatenated in report order (note: no separator between pairs)
        StringBuilder report = new StringBuilder();
        for (Event event : eventReport) {
            report.append(event.eventType());
            report.append(" ");
            report.append(event.id());
        }
        return report.toString();
    }
}
package com.tyczj.extendedcalendarview;

import java.util.Calendar;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.GridView;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;

/**
 * A month-view calendar widget with a header (previous / month-year label / next)
 * above a 7-column grid of days. Supports optional swipe gestures for month
 * navigation and a click listener for individual days.
 */
public class ExtendedCalendarView extends RelativeLayout implements OnItemClickListener, OnClickListener {

    private Context context;
    private OnDayClickListener dayListener;
    private GridView calendar;
    private CalendarAdapter mAdapter;
    private Calendar cal;
    private TextView month;
    private RelativeLayout base;
    private ImageView next, prev;
    private int gestureType = 0;
    // Built in init() rather than in a field initializer: a field initializer runs
    // before the constructor body assigns this.context, so the GestureDetector
    // would previously be constructed with a null Context.
    private GestureDetector calendarGesture;

    public static final int NO_GESTURE = 0;
    public static final int LEFT_RIGHT_GESTURE = 1;
    public static final int UP_DOWN_GESTURE = 2;
    private static final int SWIPE_MIN_DISTANCE = 120;
    private static final int SWIPE_THRESHOLD_VELOCITY = 200;
    // Per-instance view ids for the navigation buttons. These used to be mutable
    // statics, so creating a second ExtendedCalendarView overwrote the ids the
    // first instance compared against in onClick(), breaking its navigation.
    private int mountPrevId = 0;
    private int mountNextId = 1;

    /** Callback invoked when a day cell in the month grid is tapped. */
    public interface OnDayClickListener {
        public void onDayClicked(AdapterView<?> adapter, View view, int position, long id, Day day);
    }

    public ExtendedCalendarView(Context context) {
        super(context);
        this.context = context;
        init();
    }

    public ExtendedCalendarView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
        init();
    }

    public ExtendedCalendarView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        this.context = context;
        init();
    }

    private static final AtomicInteger sNextGeneratedId = new AtomicInteger(1);

    /**
     * Generate a value suitable for use in {@link #setId(int)}.
     * This value will not collide with ID values generated at build time by aapt for R.id.
     *
     * @return a generated ID value
     */
    public static int generateViewIdCustom() {
        for (;;) {
            final int result = sNextGeneratedId.get();
            // aapt-generated IDs have the high byte nonzero; clamp to the range under that.
            int newValue = result + 1;
            if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0.
            if (sNextGeneratedId.compareAndSet(result, newValue)) {
                return result;
            }
        }
    }

    /** Builds the header bar (prev button, month label, next button) and the day grid. */
    private void init() {
        this.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        cal = Calendar.getInstance();

        // context is assigned by the constructor before init() runs, so it is safe here
        calendarGesture = new GestureDetector(context, new GestureListener());

        base = new RelativeLayout(context);
        base.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        base.setMinimumHeight(50);
        base.setId(ExtendedCalendarView.generateViewIdCustom());

        LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params.leftMargin = 16;
        params.topMargin = 50;
        params.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
        params.addRule(RelativeLayout.CENTER_VERTICAL);
        prev = new ImageButton(context);
        mountPrevId = ExtendedCalendarView.generateViewIdCustom();
        prev.setId(mountPrevId);
        prev.setLayoutParams(params);
        prev.setImageResource(R.drawable.navigation_previous_item);
        prev.setOnClickListener(this);
        base.addView(prev);

        params = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params.addRule(RelativeLayout.CENTER_HORIZONTAL);
        params.addRule(RelativeLayout.CENTER_VERTICAL);
        month = new TextView(context);
        month.setId(ExtendedCalendarView.generateViewIdCustom());
        month.setLayoutParams(params);
        month.setTextAppearance(context, android.R.style.TextAppearance_Large);
        month.setText(ExtendedCalendarView.capitalize(cal.getDisplayName(Calendar.MONTH, Calendar.LONG, Locale.getDefault())) + " " + cal.get(Calendar.YEAR));
        month.setTextSize(25);
        base.addView(month);

        params = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params.rightMargin = 16;
        params.topMargin = 50;
        params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
        params.addRule(RelativeLayout.CENTER_VERTICAL);
        next = new ImageButton(context);
        next.setImageResource(R.drawable.navigation_next_item);
        next.setLayoutParams(params);
        mountNextId = ExtendedCalendarView.generateViewIdCustom();
        next.setId(mountNextId);
        next.setOnClickListener(this);
        base.addView(next);

        LayoutParams lpar = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        addView(base, lpar);

        params = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
        params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        params.addRule(RelativeLayout.BELOW, base.getId());
        calendar = new GridView(context);
        calendar.setLayoutParams(params);
        calendar.setVerticalSpacing(2);
        calendar.setHorizontalSpacing(4);
        calendar.setNumColumns(7);
        calendar.setChoiceMode(GridView.CHOICE_MODE_SINGLE);
        calendar.setDrawSelectorOnTop(false);

        mAdapter = new CalendarAdapter(context, cal);
        calendar.setAdapter(mAdapter);
        calendar.setOnTouchListener(new OnTouchListener() {
            @SuppressLint("ClickableViewAccessibility")
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                return calendarGesture.onTouchEvent(event);
            }
        });
        LayoutParams calpar = new LayoutParams(LayoutParams.WRAP_CONTENT, (int) getContext().getResources().getDimension(R.dimen.min_cell_height) * 7);
        calpar.addRule(RelativeLayout.BELOW, base.getId());
        addView(calendar, calpar);
    }

    /** Detects horizontal or vertical flings (per gestureType) to change the month. */
    private class GestureListener extends SimpleOnGestureListener {
        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            if (gestureType == LEFT_RIGHT_GESTURE) {
                if (e1.getX() - e2.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) {
                    nextMonth();
                    return true; // Right to left
                } else if (e2.getX() - e1.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) {
                    previousMonth();
                    return true; // Left to right
                }
            } else if (gestureType == UP_DOWN_GESTURE) {
                if (e1.getY() - e2.getY() > SWIPE_MIN_DISTANCE && Math.abs(velocityY) > SWIPE_THRESHOLD_VELOCITY) {
                    nextMonth();
                    return true; // Bottom to top
                } else if (e2.getY() - e1.getY() > SWIPE_MIN_DISTANCE && Math.abs(velocityY) > SWIPE_THRESHOLD_VELOCITY) {
                    previousMonth();
                    return true; // Top to bottom
                }
            }
            return false;
        }
    }

    @Override
    public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
        if (dayListener != null) {
            Day d = (Day) mAdapter.getItem(arg2);
            // day 0 is a padding cell before the first day of the month
            if (d.getDay() != 0) {
                dayListener.onDayClicked(arg0, arg1, arg2, arg3, d);
            }
        }
    }

    /**
     *
     * @param listener
     *
     * Set a listener for when you press on a day in the month
     */
    public void setOnDayClickListener(OnDayClickListener listener) {
        if (calendar != null) {
            dayListener = listener;
            calendar.setOnItemClickListener(this);
        }
    }

    @Override
    public void onClick(View v) {
        int vId = v.getId();
        if (vId == mountPrevId) {
            previousMonth();
        } else if (vId == mountNextId) {
            nextMonth();
        }
    }

    // Steps the calendar back one month, rolling the year back at January.
    private void previousMonth() {
        if (cal.get(Calendar.MONTH) == cal.getActualMinimum(Calendar.MONTH)) {
            cal.set((cal.get(Calendar.YEAR) - 1), cal.getActualMaximum(Calendar.MONTH), 1);
        } else {
            cal.set(Calendar.MONTH, cal.get(Calendar.MONTH) - 1);
        }
        rebuildCalendar();
    }

    // Steps the calendar forward one month, rolling the year forward at December.
    private void nextMonth() {
        if (cal.get(Calendar.MONTH) == cal.getActualMaximum(Calendar.MONTH)) {
            cal.set((cal.get(Calendar.YEAR) + 1), cal.getActualMinimum(Calendar.MONTH), 1);
        } else {
            cal.set(Calendar.MONTH, cal.get(Calendar.MONTH) + 1);
        }
        rebuildCalendar();
    }

    // Refreshes the header label and the day grid after a month change.
    private void rebuildCalendar() {
        if (month != null) {
            month.setText(ExtendedCalendarView.capitalize(cal.getDisplayName(Calendar.MONTH, Calendar.LONG, Locale.getDefault())) + " " + cal.get(Calendar.YEAR));
            refreshCalendar();
        }
    }

    /**
     * Returns the given string with its first character upper-cased.
     * Null and empty inputs are returned unchanged (previously crashed with
     * StringIndexOutOfBoundsException on an empty string).
     */
    public static String capitalize(String str) {
        if (str == null || str.length() == 0) {
            return str;
        }
        StringBuilder sb = new StringBuilder();
        sb.append(str);
        sb.setCharAt(0, Character.toUpperCase(sb.charAt(0)));
        return sb.toString();
    }

    /**
     * Refreshes the month
     */
    public void refreshCalendar() {
        mAdapter.refreshDays();
        mAdapter.notifyDataSetChanged();
    }

    /**
     *
     * @param color
     *
     * Sets the background color of the month bar
     */
    public void setMonthTextBackgroundColor(int color) {
        base.setBackgroundColor(color);
    }

    @SuppressLint("NewApi")
    /**
     *
     * @param drawable
     *
     * Sets the background color of the month bar. Requires at least API level 16
     */
    public void setMonthTextBackgroundDrawable(Drawable drawable) {
        if (Build.VERSION.SDK_INT > Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) {
            base.setBackground(drawable);
        }
    }

    /**
     *
     * @param resource
     *
     * Sets the background color of the month bar
     */
    public void setMonehtTextBackgroundResource(int resource) {
        base.setBackgroundResource(resource);
    }

    /**
     *
     * @param recource
     *
     * change the image of the previous month button
     */
    public void setPreviousMonthButtonImageResource(int recource) {
        prev.setImageResource(recource);
    }

    /**
     *
     * @param bitmap
     *
     * change the image of the previous month button
     */
    public void setPreviousMonthButtonImageBitmap(Bitmap bitmap) {
        prev.setImageBitmap(bitmap);
    }

    /**
     *
     * @param drawable
     *
     * change the image of the previous month button
     */
    public void setPreviousMonthButtonImageDrawable(Drawable drawable) {
        prev.setImageDrawable(drawable);
    }

    /**
     *
     * @param recource
     *
     * change the image of the next month button
     */
    public void setNextMonthButtonImageResource(int recource) {
        next.setImageResource(recource);
    }

    /**
     *
     * @param bitmap
     *
     * change the image of the next month button
     */
    public void setNextMonthButtonImageBitmap(Bitmap bitmap) {
        next.setImageBitmap(bitmap);
    }

    /**
     *
     * @param drawable
     *
     * change the image of the next month button
     */
    public void setNextMonthButtonImageDrawable(Drawable drawable) {
        next.setImageDrawable(drawable);
    }

    /**
     *
     * @param gestureType
     *
     * Allow swiping the calendar left/right or up/down to change the month.
     *
     * Default value no gesture
     */
    public void setGesture(int gestureType) {
        this.gestureType = gestureType;
    }
}
/*
 * Copyright (C) 2014 Qiujuer <qiujuer@live.cn>
 * WebSite http://www.qiujuer.net
 * Created 02/16/2015
 * Changed 07/24/2015
 * Version 2.1.0
 * Author Qiujuer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.qiujuer.genius.ui.drawable;

import android.content.res.ColorStateList;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;

import net.qiujuer.genius.ui.Ui;

import java.lang.reflect.Method;

/**
 * A drawable that changes its Paint color depending on the {@link StateColorDrawable} state.
 * <p>
 * Subclasses should implement {@link #draw(android.graphics.Canvas, android.graphics.Paint)}
 * </p>
 */
public abstract class StatePaintDrawable extends StateColorDrawable {
    private PorterDuffColorFilter mTintFilter;
    private ColorStateList mTint = null;
    private PorterDuff.Mode mTintMode = PorterDuff.Mode.SRC_IN;
    // shared Paint used by subclasses via draw(Canvas, Paint)
    protected final Paint mPaint;

    /**
     * Initializes local dynamic properties from state. This should be called
     * after significant state changes, e.g. from the One True Constructor and
     * after inflating or applying a theme.
     */
    public StatePaintDrawable(ColorStateList tintStateList) {
        super(tintStateList);
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPaint.setColor(getColor());
        mTintFilter = updateTintFilter(mTintFilter, mTint, mTintMode);
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        final Paint paint = mPaint;
        if (paint != null && paint.getColorFilter() != cf) {
            paint.setColorFilter(cf);
            invalidateSelf();
        }
    }

    @Override
    public int getOpacity() {
        final Paint p = mPaint;
        if (p.getXfermode() == null) {
            final int alpha = p.getAlpha();
            if (alpha == 0) {
                return PixelFormat.TRANSPARENT;
            }
            if (alpha == 255) {
                return PixelFormat.OPAQUE;
            }
        }
        // not sure, so be safe
        return PixelFormat.TRANSLUCENT;
    }

    @Override
    public void setDither(boolean dither) {
        mPaint.setDither(dither);
        invalidateSelf();
    }

    @Override
    public void setTintList(ColorStateList tint) {
        if (mTint != tint) {
            mTint = tint;
            mTintFilter = updateTintFilter(mTintFilter, tint, mTintMode);
            invalidateSelf();
        }
    }

    @Override
    public void setTintMode(PorterDuff.Mode tintMode) {
        // Reference comparison is sufficient for the Mode enum; the previous
        // extra compareTo() clause was redundant and could NPE when both
        // modes were null.
        if (tintMode != mTintMode) {
            mTintMode = tintMode;
            mTintFilter = updateTintFilter(mTintFilter, mTint, tintMode);
            invalidateSelf();
        }
    }

    @Override
    public boolean isStateful() {
        return super.isStateful() || (mTint != null && mTint.isStateful());
    }

    @Override
    protected boolean onStateChange(int[] stateSet) {
        boolean changed = super.onStateChange(stateSet);
        if (mTint != null && mTintMode != null) {
            mTintFilter = updateTintFilter(mTintFilter, mTint, mTintMode);
            return true;
        }
        return changed;
    }

    @Override
    protected void onColorChange(int color) {
        final Paint paint = mPaint;
        if (paint != null && paint.getColor() != color)
            paint.setColor(color);
    }

    @Override
    public void draw(Canvas canvas) {
        final Paint paint = mPaint;
        final int prevAlpha = paint.getAlpha();
        paint.setAlpha(Ui.modulateAlpha(prevAlpha, getAlpha()));

        // only draw shape if it may affect output
        if (paint.getAlpha() != 0 || paint.getXfermode() != null /*|| paint.hasShadowLayer()*/) {
            final boolean clearColorFilter;
            if (mTintFilter != null && paint.getColorFilter() == null) {
                paint.setColorFilter(mTintFilter);
                clearColorFilter = true;
            } else {
                clearColorFilter = false;
            }

            // call draw
            draw(canvas, mPaint);

            if (clearColorFilter) {
                paint.setColorFilter(null);
            }
        }
        // restore
        paint.setAlpha(prevAlpha);
    }

    /**
     * Returns the Paint used to draw the shape.
     *
     * @return mPaint
     */
    public Paint getPaint() {
        return mPaint;
    }

    /**
     * Subclasses should implement this method to do the actual drawing
     *
     * @param canvas The current {@link android.graphics.Canvas} to draw into
     * @param paint  The {@link android.graphics.Paint} the Paint object that defines with the current
     *               {@link android.content.res.ColorStateList} color
     */
    public abstract void draw(Canvas canvas, Paint paint);

    // other subclass could wack the Shader's localmatrix based on the
    // resize params (e.g. scaletofit, etc.). This could be used to scale
    // a bitmap to fill the bounds without needing any other special casing.

    /**
     * Ensures the tint filter is consistent with the current tint color and
     * mode.
     */
    PorterDuffColorFilter updateTintFilter(PorterDuffColorFilter tintFilter, ColorStateList tint,
                                           PorterDuff.Mode tintMode) {
        if (tint == null || tintMode == null) {
            return null;
        }
        final int color = tint.getColorForState(getState(), Color.TRANSPARENT);
        if (tintFilter == null) {
            return new PorterDuffColorFilter(color, tintMode);
        }
        // Try to mutate the existing filter through the non-public
        // setColor(int)/setMode(Mode) methods, falling back to a fresh filter
        // when they are unavailable. The lookups must use the primitive
        // int.class: the previous getMethod("setColor", Integer.class) never
        // matched setColor(int), so the reuse path was dead code and a new
        // filter was allocated on every call.
        try {
            Method method = tintFilter.getClass().getMethod("setColor", int.class);
            method.invoke(tintFilter, color);
            method = tintFilter.getClass().getMethod("setMode", PorterDuff.Mode.class);
            method.invoke(tintFilter, tintMode);
            return tintFilter;
        } catch (Exception e) {
            return new PorterDuffColorFilter(color, tintMode);
        }
    }
}
package com.guoxiaoxing.music.util;

import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Color;
import android.media.audiofx.AudioEffect;
import android.net.Uri;
import android.os.Build;
import android.provider.BaseColumns;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.util.TypedValue;
import android.widget.Toast;

import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.guoxiaoxing.music.MusicPlayer;
import com.guoxiaoxing.music.R;
import com.guoxiaoxing.music.provider.RecentStore;
import com.guoxiaoxing.music.provider.SongPlayCount;

import java.io.File;

/**
 * Static utility helpers for the music player: platform-version checks,
 * MediaStore queries, string/duration formatting, and track deletion.
 * All methods are stateless; most take a {@link Context}.
 */
public class TimberUtils {

    // MediaStore selection restricting queries to real music rows with a title.
    public static final String MUSIC_ONLY_SELECTION = MediaStore.Audio.AudioColumns.IS_MUSIC + "=1" + " AND " + MediaStore.Audio.AudioColumns.TITLE + " != ''";

    /** True on Android 6.0 (API 23) or newer. */
    public static boolean isMarshmallow() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
    }

    /** True on Android 5.0 (API 21) or newer. */
    public static boolean isLollipop() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
    }

    /** True on Android 4.3 (API 18) or newer. */
    public static boolean isJellyBeanMR2() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
    }

    /** True on Android 4.1 (API 16) or newer. */
    public static boolean isJellyBean() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN;
    }

    /** True on Android 4.2 (API 17) or newer. */
    public static boolean isJellyBeanMR1() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
    }

    /**
     * Builds the content Uri of an album's artwork.
     *
     * @param paramInt the album id
     */
    public static Uri getAlbumArtUri(long paramInt) {
        return ContentUris.withAppendedId(Uri.parse("content://media/external/audio/albumart"), paramInt);
    }

    /**
     * Joins two strings using the localized R.string.combine_two_strings
     * template (e.g. "artist - album").
     */
    public static final String makeCombinedString(final Context context, final String first,
                                                  final String second) {
        final String formatter = context.getResources().getString(R.string.combine_two_strings);
        return String.format(formatter, first, second);
    }

    /** Formats a quantity string (plural) for the given number. */
    public static final String makeLabel(final Context context, final int pluralInt,
                                         final int number) {
        return context.getResources().getQuantityString(pluralInt, number, number);
    }

    /**
     * Formats a duration given in seconds as h:mm:ss or m:ss, choosing the
     * short format when there is no hour component.
     */
    public static final String makeShortTimeString(final Context context, long secs) {
        long hours, mins;

        hours = secs / 3600;
        secs %= 3600;
        mins = secs / 60;
        secs %= 60;

        final String durationFormat = context.getResources().getString(
                hours == 0 ? R.string.durationformatshort : R.string.durationformatlong);
        return String.format(durationFormat, hours, mins, secs);
    }

    /** Resolves the current theme's action-bar height in pixels. */
    public static int getActionBarHeight(Context context) {
        int mActionBarHeight;
        TypedValue mTypedValue = new TypedValue();
        context.getTheme().resolveAttribute(R.attr.actionBarSize, mTypedValue, true);
        mActionBarHeight = TypedValue.complexToDimensionPixelSize(mTypedValue.data,
                context.getResources().getDisplayMetrics());
        return mActionBarHeight;
    }

    /**
     * Counts the music tracks in a playlist; returns 0 when the playlist is
     * empty or the query fails (null cursor).
     */
    public static final int getSongCountForPlaylist(final Context context, final long playlistId) {
        Cursor c = context.getContentResolver().query(
                MediaStore.Audio.Playlists.Members.getContentUri("external", playlistId),
                new String[]{BaseColumns._ID}, MUSIC_ONLY_SELECTION, null, null);

        if (c != null) {
            int count = 0;
            if (c.moveToFirst()) {
                count = c.getCount();
            }
            c.close();
            c = null;
            return count;
        }

        return 0;
    }

    /** True when the device has an activity that handles the audio-effects panel intent. */
    public static boolean hasEffectsPanel(final Activity activity) {
        final PackageManager packageManager = activity.getPackageManager();
        return packageManager.resolveActivity(createEffectsIntent(),
                PackageManager.MATCH_DEFAULT_ONLY) != null;
    }

    /** Builds the intent opening the system audio-effects panel for the current audio session. */
    public static Intent createEffectsIntent() {
        final Intent effects = new Intent(AudioEffect.ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL);
        effects.putExtra(AudioEffect.EXTRA_AUDIO_SESSION, MusicPlayer.getAudioSessionId());
        return effects;
    }

    /**
     * Picks black or white for best contrast against the given background
     * color, using the standard luminance weights (0.299/0.587/0.114).
     */
    public static int getBlackWhiteColor(int color) {
        double darkness = 1 - (0.299 * Color.red(color) + 0.587 * Color.green(color) + 0.114 * Color.blue(color)) / 255;
        if (darkness >= 0.5) {
            return Color.WHITE;
        } else return Color.BLACK;
    }

    /** Discriminates what kind of entity a numeric id refers to. */
    public enum IdType {
        NA(0),
        Artist(1),
        Album(2),
        Playlist(3);

        public final int mId;

        IdType(final int id) {
            mId = id;
        }

        /** Maps a stored int back to its IdType; throws on unknown values. */
        public static IdType getTypeById(int id) {
            for (IdType type : values()) {
                if (type.mId == id) {
                    return type;
                }
            }

            throw new IllegalArgumentException("Unrecognized id: " + id);
        }
    }

    /** The three built-in "smart" playlists, keyed by negative pseudo-ids. */
    public enum PlaylistType {
        LastAdded(-1, R.string.playlist_last_added),
        RecentlyPlayed(-2, R.string.playlist_recently_played),
        TopTracks(-3, R.string.playlist_top_tracks);

        public long mId;
        public int mTitleId;

        PlaylistType(long id, int titleId) {
            mId = id;
            mTitleId = titleId;
        }

        /** Maps a pseudo-id back to its PlaylistType, or null for regular playlists. */
        public static PlaylistType getTypeById(long id) {
            for (PlaylistType type : PlaylistType.values()) {
                if (type.mId == id) {
                    return type;
                }
            }

            return null;
        }
    }

    /** Removes a single audio track from the given MediaStore playlist. */
    public static void removeFromPlaylist(final Context context, final long id,
                                          final long playlistId) {
        final Uri uri = MediaStore.Audio.Playlists.Members.getContentUri("external", playlistId);
        final ContentResolver resolver = context.getContentResolver();
        resolver.delete(uri, MediaStore.Audio.Playlists.Members.AUDIO_ID + " = ? ", new String[]{
                Long.toString(id)
        });
    }

    /** Wipes the play-count database backing the "Top Tracks" playlist. */
    public static void clearTopTracks(Context context) {
        SongPlayCount.getInstance(context).deleteAll();
    }

    /** Wipes the recents database backing the "Recently Played" playlist. */
    public static void clearRecent(Context context) {
        RecentStore.getInstance(context).deleteAll();
    }

    /** Resets the "Last Added" cutoff to now, effectively emptying that playlist. */
    public static void clearLastAdded(Context context) {
        PreferencesUtility.getInstance(context)
                .setLastAddedCutoff(System.currentTimeMillis());
    }

    /**
     * Shows a confirm dialog; on "Delete" it removes the given tracks via
     * {@link #deleteTracks} and notifies the adapter about the removed row.
     */
    public static void showDeleteDialog(final Context context, final String name, final long[] list,
                                        final RecyclerView.Adapter adapter, final int pos) {
        new MaterialDialog.Builder(context)
                .title("Delete song?")
                .content("Are you sure you want to delete " + name + " ?")
                .positiveText("Delete")
                .negativeText("Cancel")
                .onPositive(new MaterialDialog.SingleButtonCallback() {
                    @Override
                    public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                        TimberUtils.deleteTracks(context, list);
                        adapter.notifyItemRemoved(pos);
                    }
                })
                .onNegative(new MaterialDialog.SingleButtonCallback() {
                    @Override
                    public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                        dialog.dismiss();
                    }
                })
                .show();
    }

    /**
     * Permanently deletes the given track ids: removes them from the playing
     * queue and the play-count/recents databases, deletes the MediaStore rows,
     * then deletes the underlying files, and finally toasts a summary and
     * refreshes the player.
     */
    public static void deleteTracks(final Context context, final long[] list) {
        final String[] projection = new String[]{
                BaseColumns._ID, MediaStore.MediaColumns.DATA, MediaStore.Audio.AudioColumns.ALBUM_ID
        };
        // Build "_ID IN (id1,id2,...)" for all requested tracks.
        final StringBuilder selection = new StringBuilder();
        selection.append(BaseColumns._ID + " IN (");
        for (int i = 0; i < list.length; i++) {
            selection.append(list[i]);
            if (i < list.length - 1) {
                selection.append(",");
            }
        }
        selection.append(")");
        final Cursor c = context.getContentResolver().query(
                MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, projection, selection.toString(),
                null, null);
        if (c != null) {

            // Step 1: Remove selected tracks from the current playlist, as well
            // as from the album art cache
            c.moveToFirst();
            while (!c.isAfterLast()) {
                // Remove from current playlist
                final long id = c.getLong(0);
                MusicPlayer.removeTrack(id);
                // Remove the track from the play count
                SongPlayCount.getInstance(context).removeItem(id);
                // Remove any items in the recents database
                RecentStore.getInstance(context).removeItem(id);
                c.moveToNext();
            }

            // Step 2: Remove selected tracks from the database
            context.getContentResolver().delete(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
                    selection.toString(), null);

            // Step 3: Remove files from card
            c.moveToFirst();
            while (!c.isAfterLast()) {
                final String name = c.getString(1);
                final File f = new File(name);
                try { // File.delete can throw a security exception
                    if (!f.delete()) {
                        // I'm not sure if we'd ever get here (deletion would
                        // have to fail, but no exception thrown)
                        Log.e("MusicUtils", "Failed to delete file " + name);
                    }
                    c.moveToNext();
                } catch (final SecurityException ex) {
                    // Best-effort: skip files we are not allowed to delete.
                    c.moveToNext();
                }
            }
            c.close();
        }

        final String message = makeLabel(context, R.plurals.NNNtracksdeleted, list.length);

        Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
        // Notify the views and the player that media content changed.
        context.getContentResolver().notifyChange(Uri.parse("content://media"), null);
        MusicPlayer.refresh();
    }
}
/* Copyright (c) 2017 lib4j
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * You should have received a copy of The MIT License (MIT) along with this
 * program. If not, see <http://opensource.org/licenses/MIT/>.
 */

package org.libx4j.rdb.jsql;

import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.lib4j.lang.Classes;

/**
 * Developer tool that generates the overloaded numeric DML methods (ROUND,
 * ABS, POW, ADD, DIV, ...) as Java source printed to stdout.  It builds maps
 * from operand-type pairs to result types, then expands template strings for
 * each entry.  Run via {@link #main}; not used at runtime.
 */
public class DMLGenerator {
  /** An unordered-by-name, identity-compared pair of operand classes used as a map key. */
  public static class Args {
    public final Class<?> a;
    public final Class<?> b;

    public Args(final Class<?> a, final Class<?> b) {
      this.a = a;
      this.b = b;
    }

    @Override
    public boolean equals(final Object obj) {
      if (obj == this)
        return true;

      if (!(obj instanceof Args))
        return false;

      final Args that = (Args)obj;
      // Class identity comparison is intentional: keys are always class literals.
      return a == that.a && b == that.b;
    }

    @Override
    public int hashCode() {
      // XOR makes the hash symmetric in (a, b); equals() is NOT symmetric,
      // so (a,b) and (b,a) are distinct keys that may collide.
      return a.hashCode() ^ b.hashCode();
    }

    @Override
    public String toString() {
      return "(" + DMLGenerator.getName(a) + ", " + DMLGenerator.getName(b) + ")";
    }
  }

  // The numeric column types for which methods are generated.
  private static final Class<?>[] types = new Class<?>[] {type.FLOAT.class, type.DOUBLE.class, type.TINYINT.class, type.SMALLINT.class, type.INT.class, type.BIGINT.class, type.DECIMAL.class};
  // Type pairs whose result may gain scale (e.g. DIV): pair -> result type.
  private static final Map<Args,Class<?>> scaledMap = new HashMap<Args,Class<?>>();
  // Type pairs for exact ("direct") operations (e.g. ADD): pair -> result type.
  private static final Map<Args,Class<?>> directMap = new HashMap<Args,Class<?>>();
  // Single-operand functions: operand type -> result type (insertion-ordered for stable output).
  private static final Map<Class<?>,Class<?>> singleMap = new LinkedHashMap<Class<?>,Class<?>>();

  /**
   * Records (a, b) -> r in the given map; when an operand is a Numeric column
   * type, also records its boxed/primitive generic counterpart so methods
   * taking plain Java numbers are generated too.
   */
  private static final void put(final Map<Args,Class<?>> map, final Class<?> r, final Class<?> a, final Class<?> b) {
    final Args args = new Args(a, b);
    // final Class<?> exists = map.get(args);
    // if (exists != null && exists != r)
    //   System.err.println("WARNING: " + args + ": " + getName(exists) + " with " + getName(r));

    map.put(args, r);
    if (type.Numeric.class.isAssignableFrom(b))
      map.put(new Args(a, getGenericType(b)), r);

    if (type.Numeric.class.isAssignableFrom(a))
      map.put(new Args(b, getGenericType(a)), r);
  }

  /** Records the mapping in directMap, and in scaledMap too when requested. */
  private static final void putApprox(final Class<?> r, final Class<?> a, final Class<?> b, final boolean includeScaled) {
    if (includeScaled)
      put(scaledMap, r, a, b);

    put(directMap, r, a, b);
  }

  /** Adds a mapping to both scaled and direct maps (approximate-result operations). */
  private static final void putApproxs(final Class<?> a, final Class<?> b, final Class<?> r) {
    put(a, b, r, true);
  }

  /** Adds a mapping to the direct map only (exact-result operations). */
  private static final void putDirect(final Class<?> a, final Class<?> b, final Class<?> r) {
    put(a, b, r, false);
  }

  /**
   * Expands one signed mapping into its UNSIGNED variants: (a,b), (ua,b),
   * (a,ub), (ua,ub), with the result promoted to unsigned where both
   * operands are unsigned.
   */
  private static final void put(final Class<?> a, final Class<?> b, final Class<?> r, final boolean includeScaled) {
    putApprox(r, a, b, includeScaled);
    final Class<?> ua = getUnsignedClass(a);
    final boolean bUnsigned = kind.Numeric.UNSIGNED.class.isAssignableFrom(b);
    final Class<?> ur = getUnsignedClass(r);
    putApprox(bUnsigned ? ur : r, ua, b, includeScaled);
    if (!bUnsigned) {
      final Class<?> ub = getUnsignedClass(b);
      putApprox(r, a, ub, includeScaled);
      putApprox(ur, ua, ub, includeScaled);
    }
  }

  /**
   * Resolves the Java value type (e.g. Integer) carried by a column type,
   * walking up the superclass chain until a generic parameter is found;
   * unsigned column types map to their UNSIGNED.* wrapper counterpart.
   */
  private static Class<?> getGenericType(final Class<?> cls) {
    final Type[] genericTypes = Classes.getGenericSuperclasses(cls);
    final Class<?> generic = genericTypes != null ? (Class<?>)genericTypes[0] : getGenericType(cls.getSuperclass());
    return kind.Numeric.UNSIGNED.class.isAssignableFrom(cls) ? getUnsignedPrimitive(generic) : generic;
  }

  /**
   * Maps a boxed Java number class to the UNSIGNED wrapper of the next
   * SMALLER width (Short -> UNSIGNED.Byte, etc.), reflecting that an
   * unsigned n-bit value needs n+1 signed bits; null when unmapped.
   */
  private static Class<?> getUnsignedPrimitive(final Class<?> cls) {
    return cls == Float.class ? UNSIGNED.Float.class : cls == Double.class ? UNSIGNED.Double.class : cls == BigDecimal.class ? UNSIGNED.BigDecimal.class : cls == Short.class ? UNSIGNED.Byte.class : cls == Integer.class ? UNSIGNED.Short.class : cls == Long.class ? UNSIGNED.Integer.class : cls == BigInteger.class ? UNSIGNED.Long.class : null;
  }

  /**
   * Returns a column type's nested UNSIGNED class.  Relies on the UNSIGNED
   * member being the first declared nested class (asserted).
   */
  private static Class<?> getUnsignedClass(final Class<?> cls) {
    final Class<?> unsignedClass = cls.getClasses()[0];
    assert(unsignedClass.getSimpleName().equals("UNSIGNED"));
    return unsignedClass;
  }

  // Populates the three lookup tables.  NOTE: population order matters —
  // later puts overwrite earlier ones for duplicate keys.
  static {
    for (final Class<?> cls : types) {
      if (type.ApproxNumeric.class.isAssignableFrom(cls)) {
        singleMap.put(cls, cls);
        final Class<?> unsignedType = getUnsignedClass(cls);
        singleMap.put(unsignedType, unsignedType);
      }
    }

    // Integer types promote to a floating result for single-operand functions.
    singleMap.put(type.TINYINT.class, type.FLOAT.class);
    singleMap.put(type.TINYINT.UNSIGNED.class, type.FLOAT.UNSIGNED.class);
    singleMap.put(type.SMALLINT.class, type.FLOAT.class);
    singleMap.put(type.SMALLINT.UNSIGNED.class, type.FLOAT.UNSIGNED.class);
    singleMap.put(type.INT.class, type.FLOAT.class);
    singleMap.put(type.INT.UNSIGNED.class, type.DOUBLE.UNSIGNED.class);
    singleMap.put(type.BIGINT.class, type.DOUBLE.class);
    singleMap.put(type.BIGINT.UNSIGNED.class, type.DOUBLE.UNSIGNED.class);
    singleMap.put(type.DECIMAL.class, type.DECIMAL.class);
    singleMap.put(type.DECIMAL.UNSIGNED.class, type.DECIMAL.UNSIGNED.class);
    assert(singleMap.size() == 14);

    // Pairwise result types for approximate (scale-producing) operations.
    putApproxs(type.FLOAT.class, type.FLOAT.class, type.FLOAT.class);
    putApproxs(type.FLOAT.class, type.DOUBLE.class, type.DOUBLE.class);
    putApproxs(type.FLOAT.class, type.TINYINT.class, type.FLOAT.class);
    putApproxs(type.FLOAT.class, type.SMALLINT.class, type.FLOAT.class);
    putApproxs(type.FLOAT.class, type.INT.class, type.FLOAT.class);
    putApproxs(type.FLOAT.class, type.INT.UNSIGNED.class, type.DOUBLE.class);
    putApproxs(type.FLOAT.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.FLOAT.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.DOUBLE.class, type.DOUBLE.class, type.DOUBLE.class);
    putApproxs(type.DOUBLE.class, type.TINYINT.class, type.DOUBLE.class);
    putApproxs(type.DOUBLE.class, type.SMALLINT.class, type.DOUBLE.class);
    putApproxs(type.DOUBLE.class, type.INT.class, type.DOUBLE.class);
    putApproxs(type.DOUBLE.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.DOUBLE.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.TINYINT.class, type.TINYINT.class, type.FLOAT.class);
    putApproxs(type.TINYINT.class, type.SMALLINT.class, type.FLOAT.class);
    putApproxs(type.TINYINT.class, type.INT.class, type.FLOAT.class);
    putApproxs(type.TINYINT.class, type.INT.UNSIGNED.class, type.DOUBLE.class);
    putApproxs(type.TINYINT.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.TINYINT.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.SMALLINT.class, type.SMALLINT.class, type.FLOAT.class);
    putApproxs(type.SMALLINT.class, type.INT.class, type.FLOAT.class);
    putApproxs(type.SMALLINT.class, type.INT.UNSIGNED.class, type.DOUBLE.class);
    putApproxs(type.SMALLINT.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.SMALLINT.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.INT.class, type.INT.class, type.FLOAT.class);
    putApproxs(type.INT.class, type.INT.UNSIGNED.class, type.DOUBLE.class);
    putApproxs(type.INT.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.INT.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.BIGINT.class, type.BIGINT.class, type.DOUBLE.class);
    putApproxs(type.BIGINT.class, type.DECIMAL.class, type.DECIMAL.class);
    putApproxs(type.DECIMAL.class, type.DECIMAL.class, type.DECIMAL.class);

    // Pairwise result types for exact operations: widen to the larger integer type.
    putDirect(type.TINYINT.class, type.TINYINT.class, type.TINYINT.class);
    putDirect(type.TINYINT.class, type.SMALLINT.class, type.SMALLINT.class);
    putDirect(type.TINYINT.class, type.INT.class, type.INT.class);
    putDirect(type.TINYINT.class, type.BIGINT.class, type.BIGINT.class);
    putDirect(type.SMALLINT.class, type.SMALLINT.class, type.SMALLINT.class);
    putDirect(type.SMALLINT.class, type.INT.class, type.INT.class);
    putDirect(type.SMALLINT.class, type.BIGINT.class, type.BIGINT.class);
    putDirect(type.INT.class, type.INT.class, type.INT.class);
    putDirect(type.INT.class, type.BIGINT.class, type.BIGINT.class);
    putDirect(type.BIGINT.class, type.BIGINT.class, type.BIGINT.class);
  }

  // Source templates: $1 = result type, $2/$3 = parameter types,
  // $n1 = result construction expression, $p = precision/scale ctor args.
  private static final String[] singleParamFunctions = new String[] {
    "$1 ROUND(final $2 a) {\n  return ($1)$n1.wrapper(new function.Round(a, 0));\n}",
    "$1 ROUND(final $2 a, final int scale) {\n  return ($1)$n1.wrapper(new function.Round(a, scale));\n}",
    "$1 ABS(final $2 a) {\n  return ($1)$n1.wrapper(new function.Abs(a));\n}",
    "$1 FLOOR(final $2 a) {\n  return ($1)$n1.wrapper(new function.Floor(a));\n}",
    "$1 CEIL(final $2 a) {\n  return ($1)$n1.wrapper(new function.Ceil(a));\n}",
    "$1 SQRT(final $2 a) {\n  return ($1)$n1.wrapper(new function.Sqrt(a));\n}",
    "$1 EXP(final $2 a) {\n  return ($1)$n1.wrapper(new function.Exp(a));\n}",
    "$1 LN(final $2 a) {\n  return ($1)$n1.wrapper(new function.Ln(a));\n}",
    "$1 LOG2(final $2 a) {\n  return ($1)$n1.wrapper(new function.Log2(a));\n}",
    "$1 LOG10(final $2 a) {\n  return ($1)$n1.wrapper(new function.Log10(a));\n}",
    "$1 SIN(final $2 a) {\n  return ($1)$n1.wrapper(new function.Sin(a));\n}",
    "$1 ASIN(final $2 a) {\n  return ($1)$n1.wrapper(new function.Asin(a));\n}",
    "$1 COS(final $2 a) {\n  return ($1)$n1.wrapper(new function.Cos(a));\n}",
    "$1 ACOS(final $2 a) {\n  return ($1)$n1.wrapper(new function.Acos(a));\n}",
    "$1 TAN(final $2 a) {\n  return ($1)$n1.wrapper(new function.Tan(a));\n}",
    "$1 ATAN(final $2 a) {\n  return ($1)$n1.wrapper(new function.Atan(a));\n}"
  };

  private static final String[] doubleParamFunctions = new String[] {
    "$1 POW(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new function.Pow(a, b));\n}",
    "$1 MOD(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new function.Mod(a, b));\n}",
    "$1 LOG(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new function.Log(a, b));\n}",
    "$1 ATAN2(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new function.Atan2(a, b));\n}"
  };

  private static final String[] numericExpressionsDirect = new String[] {
    "$1 ADD(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new NumericExpression(Operator.PLUS, a, b));\n}",
    "$1 SUB(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new NumericExpression(Operator.MINUS, a, b));\n}",
    "$1 MUL(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new NumericExpression(Operator.MULTIPLY, a, b));\n}"
  };

  private static final String[] numericExpressionsScaled = new String[] {
    "$1 DIV(final $2 a, final $3 b) {\n  return ($1)$n1.wrapper(new NumericExpression(Operator.DIVIDE, a, b));\n}"
  };

  /**
   * Renders a class literal as the Java source name used in generated code:
   * primitives for boxed types, simple names for Big*, otherwise the nested
   * "type$..." or "UNSIGNED..." strict name.
   */
  public static String getName(final Class<?> cls) {
    if (cls == Float.class)
      return "float";

    if (cls == Double.class)
      return "double";

    if (cls == Byte.class)
      return "byte";

    if (cls == Short.class)
      return "short";

    if (cls == Integer.class)
      return "int";

    if (cls == Long.class)
      return "long";

    if (cls == BigInteger.class || cls == BigDecimal.class)
      return cls.getSimpleName();

    int index = cls.getName().indexOf("type$");
    final String strictName = Classes.getStrictName(cls);
    if (index != -1)
      return strictName.substring(index) + (cls == type.Numeric.class ? "<?>" : "");

    index = cls.getName().indexOf("UNS");
    return strictName.substring(index);
  }

  /**
   * Builds the source expression that constructs the result column of type a,
   * choosing the UNSIGNED variant at generation time or — for approx/approx
   * pairs — at runtime based on the operands' unsigned() flags.
   */
  private static String newInstance(final Class<?> a, final Class<?> b, final Class<?> c) {
    if (a == type.FLOAT.class || a == type.DOUBLE.class || a == type.DECIMAL.class) {
      if (b == type.FLOAT.class || b == type.DOUBLE.class || b == type.DECIMAL.class) {
        if (c == null || c == type.FLOAT.class || c == type.DOUBLE.class || c == type.DECIMAL.class || kind.Numeric.UNSIGNED.class.isAssignableFrom(c)) {
          final String ub = c == type.FLOAT.class || c == type.DOUBLE.class || c == type.DECIMAL.class ? " && b.unsigned()" : "";
          return "(a.unsigned()" + ub + " ? new " + getName(a) + ".UNSIGNED($p) : new " + getName(a) + "($p))";
        }
      }

      if (kind.Numeric.UNSIGNED.class.isAssignableFrom(b))
        return "new " + getName(a) + ".UNSIGNED($p)";
    }

    return "new " + getName(a) + "($p)";
  }

  /**
   * Substitutes the $-placeholders in a template for one (result, a, b)
   * combination and fills $p with the precision/scale ctor args appropriate
   * for the result type.
   */
  private static String compile(final String function, final Class<?> a, final Class<?> b, final Class<?> c, final boolean checkBothUnsigned) {
    final boolean bIsNumeric = type.Numeric.class.isAssignableFrom(b);
    String compiled = "public static final " + function.replace("$n1", newInstance(a, b, checkBothUnsigned ? c : null)).replace("$1", getName(a)).replace("$2", getName(b)) + "\n";
    if (c != null)
      compiled = compiled.replace("$3", getName(c));

    // $p pulls precision/scale from whichever operand is the Numeric column.
    final String numericVar = bIsNumeric ? "a" : "b";
    return a == type.DECIMAL.class || a == type.DECIMAL.UNSIGNED.class ? compiled.replace("$p", numericVar + ".precision(), " + numericVar + ".scale()") : type.ExactNumeric.class.isAssignableFrom(a) ? compiled.replace("$p", numericVar + ".precision()") : compiled.replace("$p", "");
  }

  /** Prints all single-operand function overloads to stdout. */
  private static void printSingles() {
    for (final String function : singleParamFunctions)
      for (final Map.Entry<Class<?>,Class<?>> entry : singleMap.entrySet())
        System.out.println(compile(function, entry.getValue(), entry.getKey(), null, false));
  }

  /** Prints all two-operand function overloads (scaled result rules) to stdout. */
  private static void printDoubles() {
    for (final String function : doubleParamFunctions) {
      for (final Map.Entry<Args,Class<?>> entry : scaledMap.entrySet()) {
        System.out.println(compile(function, entry.getValue(), entry.getKey().a, entry.getKey().b, false));
      }
    }
  }

  /** Prints ADD/SUB/MUL (direct rules) and DIV (scaled rules) overloads to stdout. */
  private static void printNumericExpressions() {
    for (final String function : numericExpressionsDirect)
      for (final Map.Entry<Args,Class<?>> entry : directMap.entrySet()) {
        System.out.println(compile(function, entry.getValue(), entry.getKey().a, entry.getKey().b, true));
      }

    for (final String function : numericExpressionsScaled)
      for (final Map.Entry<Args,Class<?>> entry : scaledMap.entrySet()) {
        System.out.println(compile(function, entry.getValue(), entry.getKey().a, entry.getKey().b, true));
      }
  }

  /**
   * Prunes redundant overloads: drops a narrower Java-number parameter when
   * the next-wider one maps to the same result (Java widening covers it),
   * and drops unsigned-operand entries whose result type is not unsigned.
   */
  private static void filter(final Map<Args,Class<?>> map) {
    final Set<Args> removes = new HashSet<Args>();
    for (final Map.Entry<Args,Class<?>> entry : map.entrySet()) {
      final Args args = entry.getKey();
      if (!type.Numeric.class.isAssignableFrom(args.b)) {
        if (args.b == Float.class && map.get(new Args(args.a, Double.class)) == entry.getValue())
          removes.add(args);

        if (args.b == UNSIGNED.Float.class && map.get(new Args(args.a, UNSIGNED.Double.class)) == entry.getValue())
          removes.add(args);

        if (args.b == Byte.class && map.get(new Args(args.a, Short.class)) == entry.getValue())
          removes.add(args);

        if (args.b == UNSIGNED.Byte.class && map.get(new Args(args.a, UNSIGNED.Short.class)) == entry.getValue())
          removes.add(args);

        if (args.b == Short.class && map.get(new Args(args.a, Integer.class)) == entry.getValue())
          removes.add(args);

        if (args.b == UNSIGNED.Short.class && map.get(new Args(args.a, UNSIGNED.Integer.class)) == entry.getValue())
          removes.add(args);

        if (args.b == Integer.class && map.get(new Args(args.a, Long.class)) == entry.getValue())
          removes.add(args);

        if (args.b == UNSIGNED.Integer.class && map.get(new Args(args.a, UNSIGNED.Long.class)) == entry.getValue())
          removes.add(args);

        if (!kind.Numeric.UNSIGNED.class.isAssignableFrom(entry.getValue()) && UNSIGNED.class.isAssignableFrom(args.b))
          removes.add(args);
      }

      if (!kind.Numeric.UNSIGNED.class.isAssignableFrom(entry.getValue()) && (args.a == type.FLOAT.UNSIGNED.class || args.a == type.DOUBLE.UNSIGNED.class || args.a == type.DECIMAL.UNSIGNED.class))
        removes.add(args);

      if (!kind.Numeric.UNSIGNED.class.isAssignableFrom(entry.getValue()) && (args.b == type.FLOAT.UNSIGNED.class || args.b == type.DOUBLE.UNSIGNED.class || args.b == type.DECIMAL.UNSIGNED.class))
        removes.add(args);

      if (!kind.Numeric.UNSIGNED.class.isAssignableFrom(entry.getValue()) && (UNSIGNED.UnsignedNumber.class.isAssignableFrom(args.a) || UNSIGNED.UnsignedNumber.class.isAssignableFrom(args.b)))
        removes.add(args);
    }

    for (final Args args : removes)
      map.remove(args);
  }

  /** Mirrors every (a, b) entry as (b, a) so both parameter orders are generated. */
  private static void trans(final Map<Args,Class<?>> map) {
    final Map<Args,Class<?>> trans = new HashMap<Args,Class<?>>();
    for (final Map.Entry<Args,Class<?>> entry : map.entrySet()) {
      final Args args = entry.getKey();
      trans.put(new Args(args.b, args.a), entry.getValue());
    }

    map.putAll(trans);
  }

  /**
   * Entry point: prunes and symmetrizes the maps, then prints the selected
   * family of generated methods (currently only the two-param functions;
   * the other printers are left commented for manual use).
   */
  public static void main(final String[] args) {
    filter(scaledMap);
    trans(scaledMap);
    filter(directMap);
    trans(directMap);

    // int total = 0;
    // for (final Map.Entry<Args,Class<?>> entry : scaledMap.entrySet())
    //   System.out.println(getName(entry.getValue()) + " (" + getName(entry.getKey().a) + ", " + getName(entry.getKey().b) + ")");
    // System.err.println(scaledMap.size());

    // printSingles();
    printDoubles();
    // printNumericExpressions();
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.junit;

import com.intellij.execution.*;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.configurations.*;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.PsiElement;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Comparator;
import java.util.List;

/**
 * Legacy extension point for creating run configurations from a PSI location.
 * Instances act as prototypes: {@link #createProducer} clones the registered
 * instance and only the clone carries per-invocation state (enforced via the
 * {@code isClone} asserts).
 *
 * @deprecated please use {@link com.intellij.execution.actions.RunConfigurationProducer} instead
 */
public abstract class RuntimeConfigurationProducer implements Comparable, Cloneable {
  public static final ExtensionPointName<RuntimeConfigurationProducer> RUNTIME_CONFIGURATION_PRODUCER = ExtensionPointName.create("com.intellij.configurationProducer");
  public static final Comparator<RuntimeConfigurationProducer> COMPARATOR = new ProducerComparator();
  // Comparison result meaning "this producer is preferred" (see ProducerComparator).
  protected static final int PREFERED = -1;
  private final ConfigurationFactory myConfigurationFactory;
  // Set only on clones, by createProducer()/setConfiguration().
  private RunnerAndConfigurationSettings myConfiguration;
  protected boolean isClone;

  public RuntimeConfigurationProducer(final ConfigurationType configurationType) {
    this(configurationType.getConfigurationFactories()[0]);
  }

  protected RuntimeConfigurationProducer(ConfigurationFactory configurationFactory) {
    myConfigurationFactory = configurationFactory;
  }

  /**
   * Clones this prototype and asks the clone to build a configuration for the
   * given location; if an equivalent configuration already exists in the
   * RunManager it is reused, otherwise the new one gets a unique name.
   * Returns the clone (its configuration may be null).
   */
  public RuntimeConfigurationProducer createProducer(final Location location, final ConfigurationContext context) {
    final RuntimeConfigurationProducer result = clone();
    result.myConfiguration = location != null ? result.createConfigurationByElement(location, context) : null;

    if (result.myConfiguration != null) {
      final PsiElement psiElement = result.getSourceElement();
      final Location<PsiElement> _location = PsiLocation.fromPsiElement(psiElement, location.getModule());
      if (_location != null) {
        // replace with existing configuration if any
        final RunManager runManager = RunManager.getInstance(context.getProject());
        final ConfigurationType type = result.myConfiguration.getType();
        RunnerAndConfigurationSettings configuration = null;
        if (type != null) {
          configuration = result.findExistingByElement(_location, runManager.getConfigurationSettingsList(type), context);
        }

        if (configuration != null) {
          result.myConfiguration = configuration;
        }
        else {
          runManager.setUniqueNameIfNeed(result.myConfiguration);
        }
      }
    }

    return result;
  }

  /** Looks up an already-registered configuration matching the location, or null. */
  @Nullable
  public RunnerAndConfigurationSettings findExistingConfiguration(@NotNull Location location, ConfigurationContext context) {
    assert isClone;
    final RunManager runManager = RunManager.getInstance(location.getProject());
    final List<RunnerAndConfigurationSettings> configurations = runManager.getConfigurationSettingsList(getConfigurationType());
    return findExistingByElement(location, configurations, context);
  }

  /** The PSI element the produced configuration was derived from. */
  public abstract PsiElement getSourceElement();

  /** The configuration built by {@link #createProducer}; valid on clones only. */
  public RunnerAndConfigurationSettings getConfiguration() {
    assert isClone;
    return myConfiguration;
  }

  public void setConfiguration(RunnerAndConfigurationSettings configuration) {
    assert isClone;
    myConfiguration = configuration;
  }

  /** Builds a new configuration for the location, or null if this producer does not apply. */
  @Nullable
  protected abstract RunnerAndConfigurationSettings createConfigurationByElement(Location location, ConfigurationContext context);

  /** Finds a matching configuration among the existing ones; default: none. */
  @Nullable
  protected RunnerAndConfigurationSettings findExistingByElement(final Location location,
                                                                 @NotNull final List<RunnerAndConfigurationSettings> existingConfigurations,
                                                                 ConfigurationContext context) {
    assert isClone;
    return null;
  }

  /** Shallow-clones the prototype and marks the copy as a clone. */
  @Override
  public RuntimeConfigurationProducer clone() {
    assert !isClone;
    try {
      RuntimeConfigurationProducer clone = (RuntimeConfigurationProducer)super.clone();
      clone.isClone = true;
      return clone;
    }
    catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Creates settings from the context's original configuration when one of the
   * right type exists (unwrapping DelegatingRuntimeConfiguration), otherwise
   * from the factory's template.
   */
  protected RunnerAndConfigurationSettings cloneTemplateConfiguration(final Project project, @Nullable final ConfigurationContext context) {
    if (context != null) {
      final RunConfiguration original = context.getOriginalConfiguration(myConfigurationFactory.getType());
      if (original != null) {
        final RunConfiguration c = original instanceof DelegatingRuntimeConfiguration? ((DelegatingRuntimeConfiguration)original).getPeer() : original;
        return RunManager.getInstance(project).createConfiguration(c.clone(), myConfigurationFactory);
      }
    }
    return RunManager.getInstance(project).createRunConfiguration("", myConfigurationFactory);
  }

  protected ConfigurationFactory getConfigurationFactory() {
    return myConfigurationFactory;
  }

  public ConfigurationType getConfigurationType() {
    return myConfigurationFactory.getType();
  }

  /** Hook around execution; default runs the runnable immediately. */
  public void perform(ConfigurationContext context, Runnable performRunnable){
    performRunnable.run();
  }

  /** Finds the registered producer instance of the exact given class, or null. */
  public static <T extends RuntimeConfigurationProducer> T getInstance(final Class<T> aClass) {
    final RuntimeConfigurationProducer[] configurationProducers = Extensions.getExtensions(RUNTIME_CONFIGURATION_PRODUCER);
    for (RuntimeConfigurationProducer configurationProducer : configurationProducers) {
      if (configurationProducer.getClass() == aClass) {
        //noinspection unchecked
        return (T) configurationProducer;
      }
    }
    return null;
  }

  /**
   * Orders producers so that the one whose source element is nested inside
   * the other's comes first; ties fall back to the producers' own compareTo.
   */
  private static class ProducerComparator implements Comparator<RuntimeConfigurationProducer> {
    @Override
    public int compare(final RuntimeConfigurationProducer producer1, final RuntimeConfigurationProducer producer2) {
      final PsiElement psiElement1 = producer1.getSourceElement();
      final PsiElement psiElement2 = producer2.getSourceElement();
      if (doesContain(psiElement1, psiElement2)) return -PREFERED;
      if (doesContain(psiElement2, psiElement1)) return PREFERED;
      return producer1.compareTo(producer2);
    }

    // True when container is a (strict) ancestor of element in the PSI tree.
    private static boolean doesContain(final PsiElement container, PsiElement element) {
      while ((element = element.getParent()) != null) {
        if (container.equals(element)) return true;
      }
      return false;
    }
  }

  /**
   * Wrapper that forwards every RunConfiguration operation to a peer
   * configuration; reports no modules of its own.
   *
   * @deprecated feel free to pass your configuration to SMTRunnerConsoleProperties directly instead of wrapping in DelegatingRuntimeConfiguration
   */
  public static class DelegatingRuntimeConfiguration<T extends LocatableConfiguration> extends LocatableConfigurationBase implements ModuleRunConfiguration {
    private final T myConfig;

    public DelegatingRuntimeConfiguration(T config) {
      super(config.getProject(), config.getFactory(), config.getName());
      myConfig = config;
    }

    @NotNull
    @Override
    public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() {
      return myConfig.getConfigurationEditor();
    }

    @SuppressWarnings({"CloneDoesntCallSuperClone"})
    @Override
    public DelegatingRuntimeConfiguration<T> clone() {
      // Clones the peer and wraps it anew instead of calling super.clone().
      return new DelegatingRuntimeConfiguration<>((T)myConfig.clone());
    }

    @Override
    public RunProfileState getState(@NotNull Executor executor, @NotNull ExecutionEnvironment env) throws ExecutionException {
      return myConfig.getState(executor, env);
    }

    @Override
    public void checkConfiguration() throws RuntimeConfigurationException {
      myConfig.checkConfiguration();
    }

    @Override
    public String suggestedName() {
      return myConfig.suggestedName();
    }

    @Override
    public void readExternal(Element element) throws InvalidDataException {
      myConfig.readExternal(element);
    }

    @Override
    public void writeExternal(Element element) throws WriteExternalException {
      myConfig.writeExternal(element);
    }

    public T getPeer() {
      return myConfig;
    }

    @Override
    @NotNull
    public Module[] getModules() {
      return Module.EMPTY_ARRAY;
    }
  }
}
package com.example.avjindersinghsekhon.toodle;

import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.TextView;

import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;

import org.json.JSONException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.UUID;

import fr.ganfra.materialspinner.MaterialSpinner;

/**
 * Screen shown when the user taps a to-do reminder notification. Displays the
 * to-do text and lets the user either remove the item or snooze its reminder
 * by a spinner-selected number of minutes; both actions persist the list and
 * return to {@link MainActivity}.
 */
public class ReminderActivity extends AppCompatActivity {
    private TextView mtoDoTextTextView;
    private Button mRemoveToDoButton;
    private MaterialSpinner mSnoozeSpinner;
    private String[] snoozeOptionsArray;
    private StoreRetrieveData storeRetrieveData;
    private ArrayList<ToDoItem> mToDoItems;
    // The to-do item this reminder refers to; resolved from the notification's UUID extra.
    private ToDoItem mItem;
    public static final String EXIT = "com.avjindersekon.exit";
    private TextView mSnoozeTextView;
    String theme;
    AnalyticsApplication app;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        app = (AnalyticsApplication) getApplication();
        app.send(this);
        // Theme must be applied before super.onCreate()/setContentView().
        theme = getSharedPreferences(MainActivity.THEME_PREFERENCES, MODE_PRIVATE).getString(MainActivity.THEME_SAVED, MainActivity.LIGHTTHEME);
        if (theme.equals(MainActivity.LIGHTTHEME)) {
            setTheme(R.style.CustomStyle_LightTheme);
        } else {
            setTheme(R.style.CustomStyle_DarkTheme);
        }
        super.onCreate(savedInstanceState);
        setContentView(R.layout.reminder_layout);
        storeRetrieveData = new StoreRetrieveData(this, MainActivity.FILENAME);
        mToDoItems = MainActivity.getLocallyStoredData(storeRetrieveData);
        setSupportActionBar((Toolbar) findViewById(R.id.toolbar));

        // Resolve the to-do item targeted by the notification.
        Intent i = getIntent();
        UUID id = (UUID) i.getSerializableExtra(TodoNotificationService.TODOUUID);
        mItem = null;
        for (ToDoItem toDoItem : mToDoItems) {
            if (toDoItem.getIdentifier().equals(id)) {
                mItem = toDoItem;
                break;
            }
        }
        // FIX: the item may no longer exist (deleted since the notification was
        // posted) or the extra may be missing; previously execution fell through
        // to mItem.getToDoText() below and crashed with a NullPointerException.
        if (mItem == null) {
            finish();
            return;
        }

        snoozeOptionsArray = getResources().getStringArray(R.array.snooze_options);
        mRemoveToDoButton = (Button) findViewById(R.id.toDoReminderRemoveButton);
        mtoDoTextTextView = (TextView) findViewById(R.id.toDoReminderTextViewBody);
        mSnoozeTextView = (TextView) findViewById(R.id.reminderViewSnoozeTextView);
        mSnoozeSpinner = (MaterialSpinner) findViewById(R.id.todoReminderSnoozeSpinner);
        mtoDoTextTextView.setText(mItem.getToDoText());
        if (theme.equals(MainActivity.LIGHTTHEME)) {
            mSnoozeTextView.setTextColor(getResources().getColor(R.color.secondary_text));
        } else {
            mSnoozeTextView.setTextColor(Color.WHITE);
            mSnoozeTextView.setCompoundDrawablesWithIntrinsicBounds(
                    R.drawable.ic_snooze_white_24dp, 0, 0, 0
            );
        }

        mRemoveToDoButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // NOTE(review): `this` here is the anonymous listener, not the
                // activity — presumably AnalyticsApplication.send accepts any
                // Object; verify, or use ReminderActivity.this if a screen
                // reference is expected.
                app.send(this, new HitBuilders.EventBuilder().setCategory("Action").setAction("Todo Removed from Reminder Activity").build());
                mToDoItems.remove(mItem);
                changeOccurred();
                saveData();
                closeApp();
            }
        });

        ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.spinner_text_view, snoozeOptionsArray);
        adapter.setDropDownViewResource(R.layout.spinner_dropdown_item);
        mSnoozeSpinner.setAdapter(adapter);
    }

    /**
     * Flags the shared-preference EXIT marker and navigates back to MainActivity
     * (CLEAR_TOP collapses the back stack onto it).
     */
    private void closeApp() {
        Intent i = new Intent(ReminderActivity.this, MainActivity.class);
        i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        SharedPreferences sharedPreferences = getSharedPreferences(MainActivity.SHARED_PREF_DATA_SET_CHANGED, MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putBoolean(EXIT, true);
        editor.apply();
        startActivity(i);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_reminder, menu);
        return true;
    }

    /** Records (via shared prefs) that the data set changed, so other screens reload. */
    private void changeOccurred() {
        SharedPreferences sharedPreferences = getSharedPreferences(MainActivity.SHARED_PREF_DATA_SET_CHANGED, MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putBoolean(MainActivity.CHANGE_OCCURED, true);
        editor.apply();
    }

    /** Returns "now + mins minutes" and logs the snooze event to analytics. */
    private Date addTimeToDate(int mins) {
        app.send(this, new HitBuilders.EventBuilder().setCategory("Action").setAction("Snoozed").setLabel("For " + mins + " minutes").build());
        Date date = new Date();
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        calendar.add(Calendar.MINUTE, mins);
        return calendar.getTime();
    }

    /**
     * Maps the spinner selection to snooze minutes: 10 / 30 / 60.
     * Returns 0 when nothing is selected (reminder fires again immediately).
     */
    private int valueFromSpinner() {
        switch (mSnoozeSpinner.getSelectedItemPosition()) {
            case 0:
                return 10;
            case 1:
                return 30;
            case 2:
                return 60;
            default:
                return 0;
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.toDoReminderDoneMenuItem:
                // Snooze: push the reminder date forward and persist.
                Date date = addTimeToDate(valueFromSpinner());
                mItem.setToDoDate(date);
                mItem.setHasReminder(true);
                Log.d("OskarSchindler", "Date Changed to: " + date);
                changeOccurred();
                saveData();
                closeApp();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /** Persists the current to-do list; failures are logged but not surfaced. */
    private void saveData() {
        try {
            storeRetrieveData.saveToFile(mToDoItems);
        } catch (JSONException | IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }
}
package com.enderio.core.common;

import java.awt.Point;
import java.util.Map;

import javax.annotation.Nonnull;

import com.enderio.core.client.gui.widget.GhostSlot;
import com.enderio.core.common.ContainerEnderCap.BaseSlotItemHandler;
import com.enderio.core.common.util.NullHelper;
import com.google.common.collect.Maps;

import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.IContainerListener;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.server.SPacketUpdateTileEntity;

/**
 * Base GUI container that lays out the standard 3x9 player inventory plus the
 * 9-slot hotbar, on top of whatever slots a subclass adds via
 * {@link #addSlots(InventoryPlayer)}. Also keeps the backing tile entity in
 * sync with viewing players and supports ghost slots.
 */
@Deprecated
public class ContainerEnder<T extends IInventory> extends Container implements GhostSlot.IGhostSlotAware {

  // Screen coordinates of every player-inventory slot, in insertion order.
  protected final @Nonnull Map<Slot, Point> playerSlotLocations = Maps.newLinkedHashMap();

  // Index ranges [start, end) of the player inventory and hotbar slot groups.
  protected final int startPlayerSlot;
  protected final int endPlayerSlot;
  protected final int startHotBarSlot;
  protected final int endHotBarSlot;

  private final @Nonnull T inv;
  private final @Nonnull InventoryPlayer playerInv;

  // Local null check so the constructor can validate before field assignment.
  @Nonnull
  private static <T> T checkNotNull(T reference) {
    if (reference == null) {
      throw new NullPointerException();
    }
    return reference;
  }

  public ContainerEnder(@Nonnull InventoryPlayer playerInv, @Nonnull T inv) {
    this.inv = checkNotNull(inv);
    this.playerInv = checkNotNull(playerInv);

    // Subclass slots go first so their indices precede the player slots.
    addSlots(this.playerInv);

    int x = getPlayerInventoryOffset().x;
    int y = getPlayerInventoryOffset().y;

    // add players inventory (3 rows x 9 columns; 18 px per slot cell)
    startPlayerSlot = inventorySlots.size();
    for (int i = 0; i < 3; ++i) {
      for (int j = 0; j < 9; ++j) {
        Point loc = new Point(x + j * 18, y + i * 18);
        // +9 skips the hotbar indices of the player inventory.
        Slot slot = new Slot(this.playerInv, j + i * 9 + 9, loc.x, loc.y);
        addSlotToContainer(slot);
        playerSlotLocations.put(slot, loc);
      }
    }
    endPlayerSlot = inventorySlots.size();

    // Hotbar row, 58 px below the inventory grid's top row.
    startHotBarSlot = inventorySlots.size();
    for (int i = 0; i < 9; ++i) {
      Point loc = new Point(x + i * 18, y + 58);
      Slot slot = new Slot(this.playerInv, i, loc.x, loc.y);
      addSlotToContainer(slot);
      playerSlotLocations.put(slot, loc);
    }
    endHotBarSlot = inventorySlots.size();
  }

  /** Hook for subclasses to add their own slots before the player inventory. */
  protected void addSlots(@Nonnull InventoryPlayer playerInventory) {
  }

  /** Top-left pixel of the player inventory grid within the GUI. */
  public @Nonnull Point getPlayerInventoryOffset() {
    return new Point(8, 84);
  }

  /** Top-left pixel of the upgrade-slot area within the GUI. */
  public @Nonnull Point getUpgradeOffset() {
    return new Point(12, 60);
  }

  public @Nonnull T getInv() {
    return inv;
  }

  // Non-null variant of the vanilla lookup; a missing slot is a programming error.
  @Override
  @Nonnull
  public Slot getSlotFromInventory(@Nonnull IInventory invIn, int slotIn) {
    return NullHelper.notnull(super.getSlotFromInventory(invIn, slotIn), "Logic error, missing slot " + slotIn);
  }

  /** Shorthand for looking up a slot of this container's own inventory. */
  @Nonnull
  public Slot getSlotFromInventory(int slotIn) {
    return getSlotFromInventory(getInv(), slotIn);
  }

  @Override
  public boolean canInteractWith(@Nonnull EntityPlayer player) {
    return getInv().isUsableByPlayer(player);
  }

  /**
   * Shift-click transfer: container slots move into the player inventory
   * (filled back-to-front) and player slots move into the container.
   * Returns the original stack copy, or EMPTY if nothing could be moved.
   */
  @Override
  public @Nonnull ItemStack transferStackInSlot(@Nonnull EntityPlayer p_82846_1_, int p_82846_2_) {
    ItemStack itemstack = ItemStack.EMPTY;
    Slot slot = this.inventorySlots.get(p_82846_2_);
    if (slot != null && slot.getHasStack()) {
      ItemStack itemstack1 = slot.getStack();
      itemstack = itemstack1.copy();
      // First index belonging to the player's slots (inventory + hotbar).
      int minPlayerSlot = inventorySlots.size() - playerInv.mainInventory.size();
      if (p_82846_2_ < minPlayerSlot) {
        if (!this.mergeItemStack(itemstack1, minPlayerSlot, this.inventorySlots.size(), true)) {
          return ItemStack.EMPTY;
        }
      } else if (!this.mergeItemStack(itemstack1, 0, minPlayerSlot, false)) {
        return ItemStack.EMPTY;
      }
      if (itemstack1.isEmpty()) {
        slot.putStack(ItemStack.EMPTY);
      } else {
        slot.onSlotChanged();
      }
    }
    return itemstack;
  }

  /**
   * Added validation of slot input.
   *
   * Two passes over [fromIndex, toIndex): first top up existing matching
   * stacks (only if the item is stackable), then drop the remainder into the
   * first empty valid slot. reversOrder walks the range back-to-front.
   */
  @Override
  protected boolean mergeItemStack(@Nonnull ItemStack par1ItemStack, int fromIndex, int toIndex, boolean reversOrder) {
    boolean result = false;
    int checkIndex = fromIndex;

    if (reversOrder) {
      checkIndex = toIndex - 1;
    }

    Slot slot;
    ItemStack itemstack1;

    // Pass 1: merge into existing stacks of the same item/damage/NBT.
    if (par1ItemStack.isStackable()) {
      while (!par1ItemStack.isEmpty() && (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex)) {
        slot = this.inventorySlots.get(checkIndex);
        itemstack1 = slot.getStack();

        if (isSlotEnabled(slot) && !itemstack1.isEmpty() && itemstack1.getItem() == par1ItemStack.getItem()
            && (!par1ItemStack.getHasSubtypes() || par1ItemStack.getItemDamage() == itemstack1.getItemDamage())
            && ItemStack.areItemStackTagsEqual(par1ItemStack, itemstack1) && slot.isItemValid(par1ItemStack) && par1ItemStack != itemstack1) {
          int mergedSize = itemstack1.getCount() + par1ItemStack.getCount();
          // Cap by both the item's own limit and the slot's per-stack limit.
          int maxStackSize = Math.min(par1ItemStack.getMaxStackSize(), slot.getItemStackLimit(par1ItemStack));
          if (mergedSize <= maxStackSize) {
            // Everything fits into this slot.
            par1ItemStack.setCount(0);
            itemstack1.setCount(mergedSize);
            slot.onSlotChanged();
            result = true;
          } else if (itemstack1.getCount() < maxStackSize) {
            // Partial fill up to the slot's cap.
            par1ItemStack.shrink(maxStackSize - itemstack1.getCount());
            itemstack1.setCount(maxStackSize);
            slot.onSlotChanged();
            result = true;
          }
        }

        if (reversOrder) {
          --checkIndex;
        } else {
          ++checkIndex;
        }
      }
    }

    // Pass 2: place the remainder into the first empty, valid, enabled slot.
    if (!par1ItemStack.isEmpty()) {
      if (reversOrder) {
        checkIndex = toIndex - 1;
      } else {
        checkIndex = fromIndex;
      }

      while (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex) {
        slot = this.inventorySlots.get(checkIndex);
        itemstack1 = slot.getStack();

        if (isSlotEnabled(slot) && itemstack1.isEmpty() && slot.isItemValid(par1ItemStack)) {
          ItemStack in = par1ItemStack.copy();
          in.setCount(Math.min(in.getCount(), slot.getItemStackLimit(par1ItemStack)));

          slot.putStack(in);
          slot.onSlotChanged();
          par1ItemStack.shrink(in.getCount());
          result = in.getCount() > 0; // Sanity check for slots which have a 0-size limit, if this stack count is zero then no items were inserted and we should
                                      // return false.
          break;
        }

        if (reversOrder) {
          --checkIndex;
        } else {
          ++checkIndex;
        }
      }
    }

    return result;
  }

  // Forward ghost-slot writes to the backing tile entity, if it is one.
  @Override
  public void setGhostSlotContents(int slot, @Nonnull ItemStack stack, int realsize) {
    if (inv instanceof TileEntityBase) {
      ((TileEntityBase) inv).setGhostSlotContents(slot, stack, realsize);
    }
  }

  @Override
  public void detectAndSendChanges() {
    super.detectAndSendChanges();
    if (inv instanceof TileEntityBase) {
      // keep in sync with ContainerEnderCap#detectAndSendChanges()
      final SPacketUpdateTileEntity updatePacket = ((TileEntityBase) inv).getUpdatePacket();
      if (updatePacket != null) {
        for (IContainerListener containerListener : listeners) {
          if (containerListener instanceof EntityPlayerMP) {
            ((EntityPlayerMP) containerListener).connection.sendPacket(updatePacket);
          }
        }
      }
    }
  }

  // A slot participates in merging only when non-null and not disabled by
  // either of the two known disableable slot types.
  private boolean isSlotEnabled(Slot slot) {
    return slot != null && (!(slot instanceof ContainerEnder.BaseSlot) || ((ContainerEnder.BaseSlot) slot).isEnabled())
        && (!(slot instanceof BaseSlotItemHandler) || ((BaseSlotItemHandler) slot).isEnabled());
  }

  /** Slot base class whose enabled state is queried the same on both sides. */
  public static abstract class BaseSlot extends Slot {

    public BaseSlot(@Nonnull IInventory inventoryIn, int index, int xPosition, int yPosition) {
      super(inventoryIn, index, xPosition, yPosition);
    }

    @Override
    public boolean isEnabled() {
      // don't super here, super is sided
      return true;
    }

  }
}
package com.laytonsmith.PureUtilities;

import com.laytonsmith.PureUtilities.Common.ArrayUtils;
import com.laytonsmith.PureUtilities.Common.FileUtil;
import com.laytonsmith.PureUtilities.Common.StreamUtils;
import com.laytonsmith.PureUtilities.Common.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;

/**
 * Allows read operations to happen transparently on a zip file, as if it were a
 * folder. Nested zips are also supported. All operations are read only.
 * Operations on a ZipReader with a path in an actual zip are expensive, so it's
 * good to keep in mind this when using the reader, you'll have to balance
 * between memory usage (caching) or CPU use (re-reading as needed).
 *
 * Smith
 */
public class ZipReader {

    /**
     * The top level zip file, which represents the actual file on the file system.
     */
    private final File topZip;
    /**
     * The chain of Files that this file represents.
     */
    private final Deque<File> chainedPath;
    /**
     * The actual file object.
     */
    private final File file;
    /**
     * Whether or not we have to dig down into the zip, or if
     * we can use trivial file operations.
     */
    private final boolean isZipped;
    /**
     * A list of zip entries, which is cached, so we don't need to re-read
     * the zip file each time we want to do enumerative stuff.
     */
    private List<File> zipEntries = null;
    /**
     * The ZipEntry contains the information of whether or not the listed file is
     * a directory, but since we discard that information, we cache the list of directories
     * here.
     */
    private List<File> zipDirectories = new ArrayList<File>();

    /**
     * Convenience constructor, which allows for a URL to be passed in instead of a file,
     * which may be useful when working with resources.
     * @param url
     */
    public ZipReader(URL url){
        this(new File(url.getFile()));
    }

    /**
     * Creates a new ZipReader object, which can be used to read from a zip
     * file, as if the zip files were simple directories. All files are checked
     * to see if they are a zip.
     *
     * <p>{@code new ZipReader(new File("path/to/container.zip/with/nested.zip/file.txt"));}</p>
     *
     *
     * @param file The path to the internal file. This needn't exist, according
     * to File, as the zip file won't appear as a directory to other classes.
     * This constructor will however throw a FileNotFoundException if it
     * determines that the file doesn't exist.
     */
    public ZipReader(File file){
        chainedPath = new LinkedList<File>();
        //We need to remove jar style or uri style things from the file, so do that here
        if(file.getPath().startsWith("file:")){
            String newFile = file.getPath().substring(5);
            //Replace all \ with /, to simply processing, but also replace ! with /, since jar addresses
            //use that to denote the jar. We don't care, it's just a folder, so replace that with a slash.
            newFile = newFile.replace('\\', '/').replace('!', '/');
            while(newFile.startsWith("//")){
                //We only want up to one slash here
                newFile = newFile.substring(1);
            }
            file = new File(newFile);
        }
        //make sure file is absolute
        file = file.getAbsoluteFile();
        this.file = file;
        //We need to walk up the parents, putting those files onto the stack which are valid Zips
        File f = file;
        chainedPath.addFirst(f); //Gotta add the file itself to the path for everything to work
        File tempTopZip = null;
        while ((f = f.getParentFile()) != null) {
            chainedPath.addFirst(f);
            //If this works, we'll know we have our top zip file. Everything else will have
            //to be in memory, so we'll start with this if we have to dig deeper.
            if (tempTopZip == null) {
                //FIX: the probe ZipFile was never closed before, leaking a file
                //handle for every ZipReader constructed on a zipped path.
                try (ZipFile zf = new ZipFile(f)) {
                    tempTopZip = f;
                } catch (ZipException ex) {
                    //This is fine, it's just not a zip file
                } catch (IOException | AccessControlException ex) {
                    //This is fine too, it may mean we don't have permission to access this directory,
                    //but that's ok, we don't need access yet.
                }
            }
        }
        //If it's not a zipped file, this will make operations easier to deal with,
        //so let's save that information
        isZipped = tempTopZip != null;
        if(isZipped){
            topZip = tempTopZip;
        } else {
            topZip = file;
        }
    }

    /**
     * Returns the top level file for the underlying file. If this is not zipped, the file
     * returned will be the file this object was constructed with. Otherwise, the File
     * representing the actual file on the filesystem will be returned. This is mostly
     * useful for the case where locks need to be implemented, or to find the "root" of
     * the directory.
     * @return
     */
    public File getTopLevelFile(){
        return topZip;
    }

    /**
     * Returns if this file exists or not. Note this is a non-trivial operation.
     *
     * @return
     */
    public boolean exists(){
        if(!topZip.exists()){
            return false; //Don't bother trying
        }
        try{
            getInputStream().close();
            return true;
        } catch(IOException e){
            return false;
        }
    }

    /**
     * Returns true if this file is read accessible. Note that if the file is a zip,
     * the permissions are checked on the topmost zip file.
     * @return
     */
    public boolean canRead(){
        return topZip.canRead();
    }

    /**
     * Returns true if this file has write permissions. Note that if the file is nested
     * in a zip, then this will always return false. If the file doesn't exist, this will
     * also return false, but that doesn't imply that you won't be able to create file here,
     * so you may also need to check isZipped().
     * @return
     */
    public boolean canWrite(){
        if(isZipped){
            return false;
        } else {
            return topZip.canWrite();
        }
    }

    /**
     * Returns whether or not the file is inside of a zip file or not.
     * @return
     */
    public boolean isZipped(){
        return isZipped;
    }

    /*
     * This function recurses down into a zip file, ultimately returning the InputStream for the file,
     * or throwing exceptions if it can't be found.
     */
    private InputStream getFile(Deque<File> fullChain, String zipName, final ZipInputStream zis) throws FileNotFoundException, IOException {
        ZipEntry entry;
        //View over zis positioned at the current entry; closing it closes zis.
        //NOTE(review): relies on ZipInputStream.available() returning 0 at the
        //end of the current entry — verify against the JDK contract.
        InputStream zipReader = new InputStream() {

            @Override
            public int read() throws IOException {
                if (zis.available() > 0) {
                    return zis.read();
                } else {
                    return -1;
                }
            }

            @Override
            public void close() throws IOException {
                zis.close();
            }
        };
        boolean isZip = false;
        List<String> recurseAttempts = new ArrayList<String>();
        while ((entry = zis.getNextEntry()) != null) {
            //This is at least a zip file
            isZip = true;
            Deque<File> chain = new LinkedList<File>(fullChain);
            File chainFile = null;
            while ((chainFile = chain.pollFirst()) != null) {
                if (chainFile.equals(new File(zipName + File.separator + entry.getName()))) {
                    //We found it. Now, chainFile is one that is in our tree
                    //We have to do some further analyzation on it
                    break;
                }
            }
            if (chainFile == null) {
                //It's not in the chain at all, which means we don't care about it at all.
                continue;
            }
            if (chain.isEmpty()) {
                //It was the last file in the chain, so no point in looking at it at all.
                //If it was a zip or not, it doesn't matter, because this is the file they
                //specified, precisely. Read it out, and return it.
                return zipReader;
            }
            //It's a single file, it's in the chain, and the chain isn't finished, so that
            //must mean it's a container (or it's being used as one, anyways).
            //It could be that either this is just a folder in the entry list, or it could
            //mean that this is a zip. We will make note of this as one we need to attempt to
            //recurse, but only if it doesn't pan out that this is a file.
            recurseAttempts.add(zipName + File.separator + entry.getName());
        }

        for(String recurseAttempt : recurseAttempts){
            ZipInputStream inner = new ZipInputStream(zipReader);
            try{
                return getFile(fullChain, recurseAttempt, inner);
            } catch(IOException e){
                //We don't care if this breaks, we'll throw our own top level exception
                //in a moment if we got here. We still need to finish going through
                //our recurse attempts.
            }
        }

        //If we get down here, it means either we recursed into not-a-zip file, or
        //the file was otherwise not found
        if (isZip) {
            //if this is the terminal node in the chain, it's due to a file not found.
            throw new FileNotFoundException(zipName + " could not be found!");
        } else {
            //if not, it's due to this not being a zip file
            throw new IOException(zipName + " is not a zip file!");
        }
    }

    /**
     * Returns a raw input stream for this file. If you just need the string contents,
     * it would probably be easier to use getFileContents instead, however, this method
     * is necessary for accessing binary files.
     * @return An InputStream that will read the specified file
     * @throws FileNotFoundException If the file is not found
     * @throws IOException If you specify a file that isn't a zip file as if it were a folder
     */
    public InputStream getInputStream() throws FileNotFoundException, IOException {
        if (!isZipped) {
            return new FileInputStream(file);
        } else {
            return getFile(chainedPath, topZip.getAbsolutePath(), new ZipInputStream(new FileInputStream(topZip)));
        }
    }

    /**
     * If the file is a simple text file, this function is your best option. It returns
     * the contents of the file as a string.
     * @return
     * @throws FileNotFoundException If the file is not found
     * @throws IOException If you specify a file that isn't a zip file as if it were a folder
     */
    public String getFileContents() throws FileNotFoundException, IOException {
        if (!isZipped) {
            return FileUtil.read(file);
        } else {
            return StreamUtils.GetString(getInputStream());
        }
    }

    /**
     * Delegates the equals check to the underlying File object.
     * @param obj
     * @return
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final ZipReader other = (ZipReader) obj;
        return other.file.equals(this.file);
    }

    /**
     * Delegates the hashCode to the underlying File object.
     * @return
     */
    @Override
    public int hashCode() {
        return file.hashCode();
    }

    @Override
    public String toString() {
        return file.toString();
    }

    public File getFile(){
        return file;
    }

    //Lazily populates the cached entry/directory lists for a zipped reader.
    private void initList() throws IOException{
        if(!isZipped){
            return;
        }
        if(this.zipEntries == null){
            zipEntries = new ArrayList<File>();
            //FIX: close the stream even if getNextEntry throws (was leaked on error).
            try (ZipInputStream zis = new ZipInputStream(new FileInputStream(topZip))) {
                ZipEntry entry;
                while((entry = zis.getNextEntry()) != null){
                    File f = new File(topZip, entry.getName());
                    zipEntries.add(f);
                    if(entry.isDirectory()){
                        zipDirectories.add(f);
                    }
                }
            }
        }
    }

    public boolean isDirectory() throws IOException{
        if(!isZipped){
            return file.isDirectory();
        } else {
            initList();
            return zipDirectories.contains(file);
        }
    }

    public String getName(){
        return file.getName();
    }

    /**
     * Returns a list of File objects that are subfiles or directories in
     * this directory.
     * @return
     * @throws IOException
     */
    public File [] listFiles() throws IOException{
        if(!isZipped){
            return file.listFiles();
        } else {
            initList();
            List<File> files = new ArrayList<File>();
            for(File f : zipEntries){
                //If the paths start with the same thing...
                if(f.getPath().startsWith(file.getPath())){
                    //...and it's not the file we're looking from to begin with...
                    if(!file.equals(f)){
                        //...and it's not in a sub-sub folder of this file...
                        if(!f.getPath().matches(Pattern.quote(file.getPath() + File.separatorChar) + "[^" + Pattern.quote(File.separator) + "]*" + Pattern.quote(File.separator) + ".*")){
                            //...add it to the list.
                            String root = f.getPath().replaceFirst(Pattern.quote(file.getPath() + File.separator), "");
                            f = new File(root);
                            files.add(f);
                        }
                    }
                }
            }
            return ArrayUtils.asArray(File.class, files);
        }
    }

    public ZipReader[] zipListFiles() throws IOException{
        File[] ret = listFiles();
        ZipReader[] zips = new ZipReader[ret.length];
        for(int i = 0; i < ret.length; i++){
            zips[i] = new ZipReader(new File(file, ret[i].getPath()));
        }
        return zips;
    }

    /**
     * Copies all the files from this directory to the source directory.
     * If create is false, and the folder doesn't already exist, and IOException
     * will be thrown. This is similar to an "unzip" operation.
     * @param dstFolder
     */
    public void recursiveCopy(File dstFolder, boolean create) throws IOException{
        if(create){
            dstFolder.mkdirs();
        }
        if(!dstFolder.isDirectory()){
            throw new IOException("Destination folder is not a directory!");
        }
        for(ZipReader r : zipListFiles()){
            if(r.isDirectory()){
                //NOTE(review): subdirectories are copied into dstFolder itself,
                //flattening the tree — confirm this is the intended behavior.
                r.recursiveCopy(dstFolder, create);
            } else {
                File newFile = new File(dstFolder, r.file.getName());
                newFile.getParentFile().mkdirs();
                //FIX: both streams were previously left open, leaking file
                //descriptors and potentially leaving the output unflushed.
                try (InputStream in = r.getInputStream();
                        FileOutputStream fos = new FileOutputStream(newFile, false)) {
                    StreamUtils.Copy(in, fos);
                }
            }
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.curator.framework;

import com.google.common.collect.ImmutableList;
import org.apache.curator.RetryPolicy;
import org.apache.curator.connection.ClassicConnectionHandlingPolicy;
import org.apache.curator.connection.ConnectionHandlingPolicy;
import org.apache.curator.connection.StandardConnectionHandlingPolicy;
import org.apache.curator.ensemble.EnsembleProvider;
import org.apache.curator.ensemble.fixed.FixedEnsembleProvider;
import org.apache.curator.framework.api.ACLProvider;
import org.apache.curator.framework.api.CompressionProvider;
import org.apache.curator.framework.api.CreateBuilder;
import org.apache.curator.framework.api.PathAndBytesable;
import org.apache.curator.framework.imps.CuratorFrameworkImpl;
import org.apache.curator.framework.imps.CuratorTempFrameworkImpl;
import org.apache.curator.framework.imps.DefaultACLProvider;
import org.apache.curator.framework.imps.GzipCompressionProvider;
import org.apache.curator.framework.schema.SchemaSet;
import org.apache.curator.framework.state.ConnectionStateErrorPolicy;
import org.apache.curator.framework.state.StandardConnectionStateErrorPolicy;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.utils.DefaultZookeeperFactory;
import org.apache.curator.utils.ZookeeperFactory;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

/**
 * Factory methods for creating framework-style clients
 */
public class CuratorFrameworkFactory
{
    // Defaults are overridable via system properties.
    private static final int DEFAULT_SESSION_TIMEOUT_MS = Integer.getInteger("curator-default-session-timeout", 60 * 1000);
    private static final int DEFAULT_CONNECTION_TIMEOUT_MS = Integer.getInteger("curator-default-connection-timeout", 15 * 1000);
    private static final byte[] LOCAL_ADDRESS = getLocalAddress();

    private static final CompressionProvider DEFAULT_COMPRESSION_PROVIDER = new GzipCompressionProvider();
    private static final DefaultZookeeperFactory DEFAULT_ZOOKEEPER_FACTORY = new DefaultZookeeperFactory();
    private static final DefaultACLProvider DEFAULT_ACL_PROVIDER = new DefaultACLProvider();
    // Note: value fits in an int, so the (int) cast is a harmless narrowing.
    private static final long DEFAULT_INACTIVE_THRESHOLD_MS = (int)TimeUnit.MINUTES.toMillis(3);
    private static final int DEFAULT_CLOSE_WAIT_MS = (int)TimeUnit.SECONDS.toMillis(1);

    /**
     * Return a new builder that builds a CuratorFramework
     *
     * @return new builder
     */
    public static Builder builder()
    {
        return new Builder();
    }

    /**
     * Create a new client with default session timeout and default connection timeout
     *
     * @param connectString list of servers to connect to
     * @param retryPolicy retry policy to use
     * @return client
     */
    public static CuratorFramework newClient(String connectString, RetryPolicy retryPolicy)
    {
        return newClient(connectString, DEFAULT_SESSION_TIMEOUT_MS, DEFAULT_CONNECTION_TIMEOUT_MS, retryPolicy);
    }

    /**
     * Create a new client
     *
     * @param connectString list of servers to connect to
     * @param sessionTimeoutMs session timeout
     * @param connectionTimeoutMs connection timeout
     * @param retryPolicy retry policy to use
     * @return client
     */
    public static CuratorFramework newClient(String connectString, int sessionTimeoutMs, int connectionTimeoutMs, RetryPolicy retryPolicy)
    {
        return builder().
            connectString(connectString).
            sessionTimeoutMs(sessionTimeoutMs).
            connectionTimeoutMs(connectionTimeoutMs).
            retryPolicy(retryPolicy).
            build();
    }

    /**
     * Return the local address as bytes that can be used as a node payload
     *
     * @return local address bytes
     */
    public static byte[] getLocalAddress()
    {
        try
        {
            return InetAddress.getLocalHost().getHostAddress().getBytes();
        }
        catch ( UnknownHostException ignore )
        {
            // ignore — fall through to the empty payload below
        }
        return new byte[0];
    }

    /** Mutable builder collecting client settings; produces immutable framework instances. */
    public static class Builder
    {
        private EnsembleProvider ensembleProvider;
        private int sessionTimeoutMs = DEFAULT_SESSION_TIMEOUT_MS;
        private int connectionTimeoutMs = DEFAULT_CONNECTION_TIMEOUT_MS;
        private int maxCloseWaitMs = DEFAULT_CLOSE_WAIT_MS;
        private RetryPolicy retryPolicy;
        private ThreadFactory threadFactory = null;
        private String namespace;
        private List<AuthInfo> authInfos = null;
        private byte[] defaultData = LOCAL_ADDRESS;
        private CompressionProvider compressionProvider = DEFAULT_COMPRESSION_PROVIDER;
        private ZookeeperFactory zookeeperFactory = DEFAULT_ZOOKEEPER_FACTORY;
        private ACLProvider aclProvider = DEFAULT_ACL_PROVIDER;
        private boolean canBeReadOnly = false;
        private boolean useContainerParentsIfAvailable = true;
        private ConnectionStateErrorPolicy connectionStateErrorPolicy = new StandardConnectionStateErrorPolicy();
        // Legacy connection handling can be forced via a system property.
        private ConnectionHandlingPolicy connectionHandlingPolicy = Boolean.getBoolean("curator-use-classic-connection-handling") ? new ClassicConnectionHandlingPolicy() : new StandardConnectionHandlingPolicy();
        private SchemaSet schemaSet = SchemaSet.getDefaultSchemaSet();

        /**
         * Apply the current values and build a new CuratorFramework
         *
         * @return new CuratorFramework
         */
        public CuratorFramework build()
        {
            return new CuratorFrameworkImpl(this);
        }

        /**
         * Apply the current values and build a new temporary CuratorFramework. Temporary
         * CuratorFramework instances are meant for single requests to ZooKeeper ensembles
         * over a failure prone network such as a WAN. The APIs available from {@link CuratorTempFramework}
         * are limited. Further, the connection will be closed after 3 minutes of inactivity.
         *
         * @return temp instance
         */
        public CuratorTempFramework buildTemp()
        {
            return buildTemp(DEFAULT_INACTIVE_THRESHOLD_MS, TimeUnit.MILLISECONDS);
        }

        /**
         * Apply the current values and build a new temporary CuratorFramework. Temporary
         * CuratorFramework instances are meant for single requests to ZooKeeper ensembles
         * over a failure prone network such as a WAN. The APIs available from {@link CuratorTempFramework}
         * are limited. Further, the connection will be closed after <code>inactiveThresholdMs</code> milliseconds of inactivity.
         *
         * @param inactiveThreshold number of milliseconds of inactivity to cause connection close
         * @param unit threshold unit
         * @return temp instance
         */
        public CuratorTempFramework buildTemp(long inactiveThreshold, TimeUnit unit)
        {
            return new CuratorTempFrameworkImpl(this, unit.toMillis(inactiveThreshold));
        }

        /**
         * Add connection authorization
         *
         * Subsequent calls to this method overwrite the prior calls.
         *
         * @param scheme the scheme
         * @param auth the auth bytes
         * @return this
         */
        public Builder authorization(String scheme, byte[] auth)
        {
            return authorization(ImmutableList.of(new AuthInfo(scheme, (auth != null) ? Arrays.copyOf(auth, auth.length) : null)));
        }

        /**
         * Add connection authorization. The supplied authInfos are appended to those added via call to
         * {@link #authorization(java.lang.String, byte[])} for backward compatibility.
         * <p/>
         * Subsequent calls to this method overwrite the prior calls.
         *
         * @param authInfos list of {@link AuthInfo} objects with scheme and auth
         * @return this
         */
        public Builder authorization(List<AuthInfo> authInfos)
        {
            this.authInfos = ImmutableList.copyOf(authInfos);
            return this;
        }

        /**
         * Set the list of servers to connect to. IMPORTANT: use either this or {@link #ensembleProvider(EnsembleProvider)}
         * but not both.
         *
         * @param connectString list of servers to connect to
         * @return this
         */
        public Builder connectString(String connectString)
        {
            ensembleProvider = new FixedEnsembleProvider(connectString);
            return this;
        }

        /**
         * Set the list ensemble provider. IMPORTANT: use either this or {@link #connectString(String)}
         * but not both.
         *
         * @param ensembleProvider the ensemble provider to use
         * @return this
         */
        public Builder ensembleProvider(EnsembleProvider ensembleProvider)
        {
            this.ensembleProvider = ensembleProvider;
            return this;
        }

        /**
         * Sets the data to use when {@link PathAndBytesable#forPath(String)} is used.
         * This is useful for debugging purposes. For example, you could set this to be the IP of the
         * client.
         *
         * @param defaultData new default data to use
         * @return this
         */
        public Builder defaultData(byte[] defaultData)
        {
            this.defaultData = (defaultData != null) ? Arrays.copyOf(defaultData, defaultData.length) : null;
            return this;
        }

        /**
         * As ZooKeeper is a shared space, users of a given cluster should stay within
         * a pre-defined namespace. If a namespace is set here, all paths will get pre-pended
         * with the namespace
         *
         * @param namespace the namespace
         * @return this
         */
        public Builder namespace(String namespace)
        {
            this.namespace = namespace;
            return this;
        }

        /**
         * @param sessionTimeoutMs session timeout
         * @return this
         */
        public Builder sessionTimeoutMs(int sessionTimeoutMs)
        {
            this.sessionTimeoutMs = sessionTimeoutMs;
            return this;
        }

        /**
         * @param connectionTimeoutMs connection timeout
         * @return this
         */
        public Builder connectionTimeoutMs(int connectionTimeoutMs)
        {
            this.connectionTimeoutMs = connectionTimeoutMs;
            return this;
        }

        /**
         * @param maxCloseWaitMs time to wait during close to join background threads
         * @return this
         */
        public Builder maxCloseWaitMs(int maxCloseWaitMs)
        {
            this.maxCloseWaitMs = maxCloseWaitMs;
            return this;
        }

        /**
         * @param retryPolicy retry policy to use
         * @return this
         */
        public Builder retryPolicy(RetryPolicy retryPolicy)
        {
            this.retryPolicy = retryPolicy;
            return this;
        }

        /**
         * @param threadFactory thread factory used to create Executor Services
         * @return this
         */
        public Builder threadFactory(ThreadFactory threadFactory)
        {
            this.threadFactory = threadFactory;
            return this;
        }

        /**
         * @param compressionProvider the compression provider
         * @return this
         */
        public Builder compressionProvider(CompressionProvider compressionProvider)
        {
            this.compressionProvider = compressionProvider;
            return this;
        }

        /**
         * @param zookeeperFactory the zookeeper factory to use
         * @return this
         */
        public Builder zookeeperFactory(ZookeeperFactory zookeeperFactory)
        {
            this.zookeeperFactory = zookeeperFactory;
            return this;
        }

        /**
         * @param aclProvider a provider for ACLs
         * @return this
         */
        public Builder aclProvider(ACLProvider aclProvider)
        {
            this.aclProvider = aclProvider;
            return this;
        }

        /**
         * @param canBeReadOnly if true, allow ZooKeeper client to enter
         *                      read only mode in case of a network partition.
See * {@link ZooKeeper#ZooKeeper(String, int, Watcher, long, byte[], boolean)} * for details * @return this */ public Builder canBeReadOnly(boolean canBeReadOnly) { this.canBeReadOnly = canBeReadOnly; return this; } /** * By default, Curator uses {@link CreateBuilder#creatingParentContainersIfNeeded()} * if the ZK JAR supports {@link CreateMode#CONTAINER}. Call this method to turn off this behavior. * * @return this */ public Builder dontUseContainerParents() { this.useContainerParentsIfAvailable = false; return this; } /** * Set the error policy to use. The default is {@link StandardConnectionStateErrorPolicy} * * @since 3.0.0 * @param connectionStateErrorPolicy new error policy * @return this */ public Builder connectionStateErrorPolicy(ConnectionStateErrorPolicy connectionStateErrorPolicy) { this.connectionStateErrorPolicy = connectionStateErrorPolicy; return this; } /** * <p> * Change the connection handling policy. The default policy is {@link StandardConnectionHandlingPolicy}. * </p> * <p> * <strong>IMPORTANT: </strong> StandardConnectionHandlingPolicy has different behavior than the connection * policy handling prior to version 3.0.0. You can specify that the connection handling be the method * prior to 3.0.0 by passing in an instance of {@link ClassicConnectionHandlingPolicy} here or by * setting the command line value "curator-use-classic-connection-handling" to true (e.g. <tt>-Dcurator-use-classic-connection-handling=true</tt>). * </p> * <p> * Major differences from the older behavior are: * </p> * <ul> * <li> * Session/connection timeouts are no longer managed by the low-level client. They are managed * by the CuratorFramework instance. There should be no noticeable differences. * </li> * <li> * Prior to 3.0.0, each iteration of the retry policy would allow the connection timeout to elapse * if the connection hadn't yet succeeded. This meant that the true connection timeout was the configured * value times the maximum retries in the retry policy. 
This longstanding issue has been address. * Now, the connection timeout can elapse only once for a single API call. * </li> * <li> * <strong>MOST IMPORTANTLY!</strong> Prior to 3.0.0, {@link ConnectionState#LOST} did not imply * a lost session (much to the confusion of users). Now, * Curator will set the LOST state only when it believes that the ZooKeeper session * has expired. ZooKeeper connections have a session. When the session expires, clients must take appropriate * action. In Curator, this is complicated by the fact that Curator internally manages the ZooKeeper * connection. Now, Curator will set the LOST state when any of the following occurs: * a) ZooKeeper returns a {@link Watcher.Event.KeeperState#Expired} or {@link KeeperException.Code#SESSIONEXPIRED}; * b) Curator closes the internally managed ZooKeeper instance; c) The session timeout * elapses during a network partition. * </li> * </ul> * * @param connectionHandlingPolicy the policy * @return this * @since 3.0.0 */ public Builder connectionHandlingPolicy(ConnectionHandlingPolicy connectionHandlingPolicy) { this.connectionHandlingPolicy = connectionHandlingPolicy; return this; } /** * Add an enforced schema set * * @param schemaSet the schema set * @return this * @since 3.2.0 */ public Builder schemaSet(SchemaSet schemaSet) { this.schemaSet = schemaSet; return this; } public ACLProvider getAclProvider() { return aclProvider; } public ZookeeperFactory getZookeeperFactory() { return zookeeperFactory; } public CompressionProvider getCompressionProvider() { return compressionProvider; } public ThreadFactory getThreadFactory() { return threadFactory; } public EnsembleProvider getEnsembleProvider() { return ensembleProvider; } public int getSessionTimeoutMs() { return sessionTimeoutMs; } public int getConnectionTimeoutMs() { return connectionTimeoutMs; } public int getMaxCloseWaitMs() { return maxCloseWaitMs; } public RetryPolicy getRetryPolicy() { return retryPolicy; } public String getNamespace() { return 
namespace; } public boolean useContainerParentsIfAvailable() { return useContainerParentsIfAvailable; } public ConnectionStateErrorPolicy getConnectionStateErrorPolicy() { return connectionStateErrorPolicy; } public ConnectionHandlingPolicy getConnectionHandlingPolicy() { return connectionHandlingPolicy; } public SchemaSet getSchemaSet() { return schemaSet; } @Deprecated public String getAuthScheme() { int qty = (authInfos != null) ? authInfos.size() : 0; switch ( qty ) { case 0: { return null; } case 1: { return authInfos.get(0).scheme; } default: { throw new IllegalStateException("More than 1 auth has been added"); } } } @Deprecated public byte[] getAuthValue() { int qty = (authInfos != null) ? authInfos.size() : 0; switch ( qty ) { case 0: { return null; } case 1: { byte[] bytes = authInfos.get(0).getAuth(); return (bytes != null) ? Arrays.copyOf(bytes, bytes.length) : null; } default: { throw new IllegalStateException("More than 1 auth has been added"); } } } public List<AuthInfo> getAuthInfos() { return authInfos; } public byte[] getDefaultData() { return defaultData; } public boolean canBeReadOnly() { return canBeReadOnly; } private Builder() { } } private CuratorFrameworkFactory() { } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.EnumSet; import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Progressable; /** * Implementation of AbstractFileSystem based on the existing implementation of * {@link FileSystem}. 
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public abstract class DelegateToFileSystem extends AbstractFileSystem {

  // Sentinel used when the wrapped FileSystem does not define a default port;
  // matches the URI.getPort() contract of returning -1 for "undefined".
  private static final int DELEGATE_TO_FS_DEFAULT_PORT = -1;

  /** The wrapped FileSystem that every operation delegates to. */
  protected final FileSystem fsImpl;

  /**
   * Wraps an existing {@link FileSystem} instance, initializing it with the
   * given URI/configuration and sharing this AbstractFileSystem's statistics.
   *
   * @param theUri URI of the file system
   * @param theFsImpl the FileSystem implementation to delegate to
   * @param conf configuration used to initialize theFsImpl
   * @param supportedScheme the scheme this file system supports
   * @param authorityRequired whether the URI must carry an authority
   * @throws IOException if initialization of the wrapped FileSystem fails
   * @throws URISyntaxException if the URI is malformed
   */
  protected DelegateToFileSystem(URI theUri, FileSystem theFsImpl,
      Configuration conf, String supportedScheme, boolean authorityRequired)
      throws IOException, URISyntaxException {
    super(theUri, supportedScheme, authorityRequired,
        getDefaultPortIfDefined(theFsImpl));
    fsImpl = theFsImpl;
    fsImpl.initialize(theUri, conf);
    fsImpl.statistics = getStatistics();
  }

  /**
   * Returns the default port if the file system defines one.
   * {@link FileSystem#getDefaultPort()} returns 0 to indicate the default port
   * is undefined. However, the logic that consumes this value expects to
   * receive -1 to indicate the port is undefined, which agrees with the
   * contract of {@link URI#getPort()}.
   *
   * @param theFsImpl file system to check for default port
   * @return default port, or -1 if default port is undefined
   */
  private static int getDefaultPortIfDefined(FileSystem theFsImpl) {
    int defaultPort = theFsImpl.getDefaultPort();
    return defaultPort != 0 ? defaultPort : DELEGATE_TO_FS_DEFAULT_PORT;
  }

  @Override
  public Path getInitialWorkingDirectory() {
    return fsImpl.getInitialWorkingDirectory();
  }

  @Override
  @SuppressWarnings("deprecation") // call to primitiveCreate
  public FSDataOutputStream createInternal (Path f,
      EnumSet<CreateFlag> flag, FsPermission absolutePermission, int bufferSize,
      short replication, long blockSize, Progressable progress,
      ChecksumOpt checksumOpt, boolean createParent) throws IOException {
    checkPath(f);

    // Default impl assumes that permissions do not matter
    // calling the regular create is good enough.
    // FSs that implement permissions should override this.

    if (!createParent) { // parent must exist.
      // since this.create makes parent dirs automatically
      // we must throw exception if parent does not exist.
      final FileStatus stat = getFileStatus(f.getParent());
      if (stat == null) {
        throw new FileNotFoundException("Missing parent:" + f);
      }
      if (!stat.isDirectory()) {
        throw new ParentNotDirectoryException("parent is not a dir:" + f);
      }
      // parent does exist - go ahead with create of file.
    }
    return fsImpl.primitiveCreate(f, absolutePermission, flag,
        bufferSize, replication, blockSize, progress, checksumOpt);
  }

  @Override
  public boolean delete(Path f, boolean recursive) throws IOException {
    checkPath(f);
    return fsImpl.delete(f, recursive);
  }

  @Override
  public BlockLocation[] getFileBlockLocations(Path f, long start, long len)
      throws IOException {
    checkPath(f);
    return fsImpl.getFileBlockLocations(f, start, len);
  }

  @Override
  public FileChecksum getFileChecksum(Path f) throws IOException {
    checkPath(f);
    return fsImpl.getFileChecksum(f);
  }

  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    checkPath(f);
    return fsImpl.getFileStatus(f);
  }

  @Override
  public FileStatus getFileLinkStatus(final Path f) throws IOException {
    FileStatus status = fsImpl.getFileLinkStatus(f);
    // FileSystem#getFileLinkStatus qualifies the link target
    // AbstractFileSystem needs to return it plain since it's qualified
    // in FileContext, so re-get and set the plain target
    if (status.isSymlink()) {
      status.setSymlink(fsImpl.getLinkTarget(f));
    }
    return status;
  }

  @Override
  public FsStatus getFsStatus() throws IOException {
    return fsImpl.getStatus();
  }

  @Override
  public FsStatus getFsStatus(final Path f) throws IOException {
    return fsImpl.getStatus(f);
  }

  @Override
  @Deprecated
  public FsServerDefaults getServerDefaults() throws IOException {
    return fsImpl.getServerDefaults();
  }

  @Override
  public FsServerDefaults getServerDefaults(final Path f) throws IOException {
    return fsImpl.getServerDefaults(f);
  }

  @Override
  public Path getHomeDirectory() {
    return fsImpl.getHomeDirectory();
  }

  @Override
  public int getUriDefaultPort() {
    // keep the -1 "undefined" convention consistent with the constructor
    return getDefaultPortIfDefined(fsImpl);
  }

  @Override
  public FileStatus[] listStatus(Path f) throws IOException {
    checkPath(f);
    return fsImpl.listStatus(f);
  }

  @Override
  @SuppressWarnings("deprecation") // call to primitiveMkdir
  public void mkdir(Path dir, FsPermission permission, boolean createParent)
      throws IOException {
    checkPath(dir);
    fsImpl.primitiveMkdir(dir, permission, createParent);
  }

  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    checkPath(f);
    return fsImpl.open(f, bufferSize);
  }

  @Override
  public boolean truncate(Path f, long newLength) throws IOException {
    checkPath(f);
    return fsImpl.truncate(f, newLength);
  }

  @Override
  @SuppressWarnings("deprecation") // call to rename
  public void renameInternal(Path src, Path dst) throws IOException {
    checkPath(src);
    checkPath(dst);
    fsImpl.rename(src, dst, Options.Rename.NONE);
  }

  @Override
  public void setOwner(Path f, String username, String groupname)
      throws IOException {
    checkPath(f);
    fsImpl.setOwner(f, username, groupname);
  }

  @Override
  public void setPermission(Path f, FsPermission permission)
      throws IOException {
    checkPath(f);
    fsImpl.setPermission(f, permission);
  }

  @Override
  public boolean setReplication(Path f, short replication) throws IOException {
    checkPath(f);
    return fsImpl.setReplication(f, replication);
  }

  @Override
  public void setTimes(Path f, long mtime, long atime) throws IOException {
    checkPath(f);
    fsImpl.setTimes(f, mtime, atime);
  }

  @Override
  public void setVerifyChecksum(boolean verifyChecksum) throws IOException {
    fsImpl.setVerifyChecksum(verifyChecksum);
  }

  @Override
  public boolean supportsSymlinks() {
    return fsImpl.supportsSymlinks();
  }

  @Override
  public void createSymlink(Path target, Path link, boolean createParent)
      throws IOException {
    fsImpl.createSymlink(target, link, createParent);
  }

  @Override
  public Path getLinkTarget(final Path f) throws IOException {
    return fsImpl.getLinkTarget(f);
  }

  @Override //AbstractFileSystem
  public String getCanonicalServiceName() {
    return fsImpl.getCanonicalServiceName();
  }

  @Override //AbstractFileSystem
  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
    return Arrays.asList(fsImpl.addDelegationTokens(renewer, null));
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.server; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.drill.common.AutoCloseables; import org.apache.drill.common.StackTrace; import org.apache.drill.common.config.DrillConfig; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.coord.ClusterCoordinator; import org.apache.drill.exec.coord.ClusterCoordinator.RegistrationHandle; import org.apache.drill.exec.coord.zk.ZKClusterCoordinator; import org.apache.drill.exec.exception.DrillbitStartupException; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.server.options.OptionValue; import org.apache.drill.exec.server.options.OptionValue.OptionType; import org.apache.drill.exec.server.rest.DrillRestServer; import org.apache.drill.exec.service.ServiceEngine; import org.apache.drill.exec.store.sys.CachingStoreProvider; import org.apache.drill.exec.store.sys.PStoreProvider; import org.apache.drill.exec.store.sys.PStoreRegistry; import org.apache.drill.exec.store.sys.local.LocalPStoreProvider; import org.apache.drill.exec.work.WorkManager; 
import org.apache.zookeeper.Environment; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ErrorHandler; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.resource.Resource; import org.glassfish.jersey.servlet.ServletContainer; import com.codahale.metrics.servlets.MetricsServlet; import com.codahale.metrics.servlets.ThreadDumpServlet; import com.google.common.base.Stopwatch; /** * Starts, tracks and stops all the required services for a Drillbit daemon to work. */ public class Drillbit implements AutoCloseable { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Drillbit.class); static { Environment.logEnv("Drillbit environment: ", logger); } private boolean isClosed = false; public static Drillbit start(final StartupOptions options) throws DrillbitStartupException { return start(DrillConfig.create(options.getConfigLocation()), null); } public static Drillbit start(final DrillConfig config) throws DrillbitStartupException { return start(config, null); } public static Drillbit start(final DrillConfig config, final RemoteServiceSet remoteServiceSet) throws DrillbitStartupException { logger.debug("Starting new Drillbit."); Drillbit bit; try { bit = new Drillbit(config, remoteServiceSet); } catch (final Exception ex) { throw new DrillbitStartupException("Failure while initializing values in Drillbit.", ex); } try { bit.run(); } catch (final Exception e) { bit.close(); throw new DrillbitStartupException("Failure during initial startup of Drillbit.", e); } logger.debug("Started new Drillbit."); return bit; } private final static String SYSTEM_OPTIONS_NAME = "org.apache.drill.exec.server.Drillbit.system_options"; private static void throwInvalidSystemOption(final String systemProp, final String errorMessage) { throw new IllegalStateException("Property \"" + SYSTEM_OPTIONS_NAME + "\" 
part \"" + systemProp + "\" " + errorMessage + "."); } private static String stripQuotes(final String s, final String systemProp) { if (s.isEmpty()) { return s; } final char cFirst = s.charAt(0); final char cLast = s.charAt(s.length() - 1); if ((cFirst == '"') || (cFirst == '\'')) { if (cLast != cFirst) { throwInvalidSystemOption(systemProp, "quoted value does not have closing quote"); } return s.substring(1, s.length() - 2); // strip the quotes } if ((cLast == '"') || (cLast == '\'')) { throwInvalidSystemOption(systemProp, "value has unbalanced closing quote"); } // return as-is return s; } private void javaPropertiesToSystemOptions() { // get the system options property final String allSystemProps = System.getProperty(SYSTEM_OPTIONS_NAME); if ((allSystemProps == null) || allSystemProps.isEmpty()) { return; } final OptionManager optionManager = getContext().getOptionManager(); // parse out the properties, validate, and then set them final String systemProps[] = allSystemProps.split(","); for(final String systemProp : systemProps) { final String keyValue[] = systemProp.split("="); if (keyValue.length != 2) { throwInvalidSystemOption(systemProp, "does not contain a key=value assignment"); } final String optionName = keyValue[0].trim(); if (optionName.isEmpty()) { throwInvalidSystemOption(systemProp, "does not contain a key before the assignment"); } final String optionString = stripQuotes(keyValue[1].trim(), systemProp); if (optionString.isEmpty()) { throwInvalidSystemOption(systemProp, "does not contain a value after the assignment"); } final OptionValue defaultValue = optionManager.getOption(optionName); if (defaultValue == null) { throwInvalidSystemOption(systemProp, "does not specify a valid option name"); } if (defaultValue.type != OptionType.SYSTEM) { throwInvalidSystemOption(systemProp, "does not specify a SYSTEM option "); } final OptionValue optionValue = OptionValue.createOption( defaultValue.kind, OptionType.SYSTEM, optionName, optionString); 
optionManager.setOption(optionValue); } } public static void main(final String[] cli) throws DrillbitStartupException { final StartupOptions options = StartupOptions.parse(cli); start(options); } private final ClusterCoordinator coord; private final ServiceEngine engine; private final PStoreProvider storeProvider; private final WorkManager manager; private final BootStrapContext context; private final Server embeddedJetty; private RegistrationHandle registrationHandle; public Drillbit(final DrillConfig config, final RemoteServiceSet serviceSet) throws Exception { final Stopwatch w = new Stopwatch().start(); logger.debug("Construction started."); final boolean allowPortHunting = serviceSet != null; final boolean enableHttp = config.getBoolean(ExecConstants.HTTP_ENABLE); context = new BootStrapContext(config); manager = new WorkManager(context); engine = new ServiceEngine(manager.getControlMessageHandler(), manager.getUserWorker(), context, manager.getWorkBus(), manager.getDataHandler(), allowPortHunting); if (enableHttp) { embeddedJetty = new Server(config.getInt(ExecConstants.HTTP_PORT)); } else { embeddedJetty = null; } if (serviceSet != null) { coord = serviceSet.getCoordinator(); storeProvider = new CachingStoreProvider(new LocalPStoreProvider(config)); } else { coord = new ZKClusterCoordinator(config); storeProvider = new PStoreRegistry(this.coord, config).newPStoreProvider(); } logger.info("Construction completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS)); } private void startJetty() throws Exception { if (embeddedJetty == null) { return; } final ErrorHandler errorHandler = new ErrorHandler(); errorHandler.setShowStacks(true); errorHandler.setShowMessageInTitle(true); final ServletContextHandler servletContextHandler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS); servletContextHandler.setErrorHandler(errorHandler); servletContextHandler.setContextPath("/"); embeddedJetty.setHandler(servletContextHandler); final ServletHolder servletHolder = 
new ServletHolder(new ServletContainer(new DrillRestServer(manager))); // servletHolder.setInitParameter(ServerProperties.PROVIDER_PACKAGES, "org.apache.drill.exec.server"); servletHolder.setInitOrder(1); servletContextHandler.addServlet(servletHolder, "/*"); servletContextHandler.addServlet( new ServletHolder(new MetricsServlet(context.getMetrics())), "/status/metrics"); servletContextHandler.addServlet(new ServletHolder(new ThreadDumpServlet()), "/status/threads"); final ServletHolder staticHolder = new ServletHolder("static", DefaultServlet.class); staticHolder.setInitParameter("resourceBase", Resource.newClassPathResource("/rest/static").toString()); staticHolder.setInitParameter("dirAllowed","false"); staticHolder.setInitParameter("pathInfoOnly","true"); servletContextHandler.addServlet(staticHolder,"/static/*"); embeddedJetty.start(); } public void run() throws Exception { final Stopwatch w = new Stopwatch().start(); logger.debug("Startup begun."); coord.start(10000); storeProvider.start(); final DrillbitEndpoint md = engine.start(); manager.start(md, engine.getController(), engine.getDataConnectionCreator(), coord, storeProvider); final DrillbitContext drillbitContext = manager.getContext(); drillbitContext.getStorage().init(); drillbitContext.getOptionManager().init(); javaPropertiesToSystemOptions(); registrationHandle = coord.register(md); startJetty(); Runtime.getRuntime().addShutdownHook(new ShutdownThread(this, new StackTrace())); logger.info("Startup completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS)); } @Override public synchronized void close() { // avoid complaints about double closing if (isClosed) { return; } final Stopwatch w = new Stopwatch().start(); logger.debug("Shutdown begun."); // wait for anything that is running to complete manager.waitToExit(); if (coord != null && registrationHandle != null) { coord.unregister(registrationHandle); } try { Thread.sleep(context.getConfig().getInt(ExecConstants.ZK_REFRESH) * 2); } catch (final 
InterruptedException e) { logger.warn("Interrupted while sleeping during coordination deregistration."); // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the // interruption and respond to it if it wants to. Thread.currentThread().interrupt(); } if (embeddedJetty != null) { try { embeddedJetty.stop(); } catch (final Exception e) { logger.warn("Failure while shutting down embedded jetty server."); } } // TODO these should use a DeferredException AutoCloseables.close(engine, logger); AutoCloseables.close(storeProvider, logger); AutoCloseables.close(coord, logger); AutoCloseables.close(manager, logger); AutoCloseables.close(context, logger); logger.info("Shutdown completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS)); isClosed = true; } /** * Shutdown hook for Drillbit. Closes the drillbit, and reports on errors that * occur during closure, as well as the location the drillbit was started from. */ private static class ShutdownThread extends Thread { private final static AtomicInteger idCounter = new AtomicInteger(0); private final Drillbit drillbit; private final StackTrace stackTrace; /** * Constructor. * * @param drillbit the drillbit to close down * @param stackTrace the stack trace from where the Drillbit was started; * use new StackTrace() to generate this */ public ShutdownThread(final Drillbit drillbit, final StackTrace stackTrace) { this.drillbit = drillbit; this.stackTrace = stackTrace; /* * TODO should we try to determine a test class name? * See https://blogs.oracle.com/tor/entry/how_to_determine_the_junit */ setName("Drillbit-ShutdownHook#" + idCounter.getAndIncrement()); } @Override public void run() { logger.info("Received shutdown request."); try { /* * We can avoid metrics deregistration concurrency issues by only closing * one drillbit at a time. To enforce that, we synchronize on a convenient * singleton object. 
*/ synchronized(idCounter) { drillbit.close(); } } catch(final Exception e) { throw new RuntimeException("Caught exception closing Drillbit started from\n" + stackTrace, e); } } } public DrillbitContext getContext() { return manager.getContext(); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.sql.gen; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.primitives.Primitives; import io.airlift.bytecode.Access; import io.airlift.bytecode.BytecodeBlock; import io.airlift.bytecode.BytecodeNode; import io.airlift.bytecode.ClassDefinition; import io.airlift.bytecode.FieldDefinition; import io.airlift.bytecode.MethodDefinition; import io.airlift.bytecode.Parameter; import io.airlift.bytecode.ParameterizedType; import io.airlift.bytecode.Scope; import io.airlift.bytecode.Variable; import io.airlift.bytecode.expression.BytecodeExpression; import io.prestosql.metadata.Metadata; import io.prestosql.operator.aggregation.AccumulatorCompiler; import io.prestosql.operator.aggregation.LambdaProvider; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.sql.relational.CallExpression; import io.prestosql.sql.relational.ConstantExpression; import io.prestosql.sql.relational.InputReferenceExpression; import io.prestosql.sql.relational.LambdaDefinitionExpression; import io.prestosql.sql.relational.RowExpression; import io.prestosql.sql.relational.RowExpressionVisitor; import io.prestosql.sql.relational.SpecialForm; import io.prestosql.sql.relational.VariableReferenceExpression; import org.objectweb.asm.Handle; import 
org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.bytecode.Access.PRIVATE;
import static io.airlift.bytecode.Access.PUBLIC;
import static io.airlift.bytecode.Access.a;
import static io.airlift.bytecode.Parameter.arg;
import static io.airlift.bytecode.ParameterizedType.type;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantFalse;
import static io.airlift.bytecode.expression.BytecodeExpressions.invokeDynamic;
import static io.prestosql.spi.StandardErrorCode.COMPILER_ERROR;
import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.prestosql.sql.gen.BytecodeUtils.boxPrimitiveIfNecessary;
import static io.prestosql.sql.gen.BytecodeUtils.unboxPrimitiveIfNecessary;
import static io.prestosql.sql.gen.LambdaCapture.LAMBDA_CAPTURE_METHOD;
import static io.prestosql.sql.gen.LambdaExpressionExtractor.extractLambdaExpressions;
import static io.prestosql.util.CompilerUtils.defineClass;
import static io.prestosql.util.CompilerUtils.makeClassName;
import static io.prestosql.util.Failures.checkCondition;
import static java.util.Objects.requireNonNull;
import static org.objectweb.asm.Type.getMethodType;
import static org.objectweb.asm.Type.getType;

/**
 * Compiles {@code LambdaDefinitionExpression}s into JVM methods and emits the
 * {@code invokedynamic} call sites that capture their free variables at runtime.
 * Static utility class; not instantiable.
 */
public final class LambdaBytecodeGenerator
{
    private LambdaBytecodeGenerator() {}

    /**
     * Pre-generates one method per lambda contained in {@code expression} on the
     * container class and returns the mapping from lambda expression to its
     * compiled form.
     */
    public static Map<LambdaDefinitionExpression, CompiledLambda> generateMethodsForLambda(
            ClassDefinition containerClassDefinition,
            CallSiteBinder callSiteBinder,
            CachedInstanceBinder cachedInstanceBinder,
            RowExpression expression,
            Metadata metadata)
    {
        Set<LambdaDefinitionExpression> lambdaExpressions = ImmutableSet.copyOf(extractLambdaExpressions(expression));
        ImmutableMap.Builder<LambdaDefinitionExpression, CompiledLambda> compiledLambdaMap = ImmutableMap.builder();
        int counter = 0;
        for (LambdaDefinitionExpression lambdaExpression : lambdaExpressions) {
            // Each nested lambda may reference lambdas compiled earlier in this loop,
            // hence the snapshot of the builder passed in.
            CompiledLambda compiledLambda = LambdaBytecodeGenerator.preGenerateLambdaExpression(
                    lambdaExpression,
                    "lambda_" + counter,
                    containerClassDefinition,
                    compiledLambdaMap.build(),
                    callSiteBinder,
                    cachedInstanceBinder,
                    metadata);
            compiledLambdaMap.put(lambdaExpression, compiledLambda);
            counter++;
        }
        return compiledLambdaMap.build();
    }

    /**
     * @return a MethodHandle field that represents the lambda expression
     */
    public static CompiledLambda preGenerateLambdaExpression(
            LambdaDefinitionExpression lambdaExpression,
            String methodName,
            ClassDefinition classDefinition,
            Map<LambdaDefinitionExpression, CompiledLambda> compiledLambdaMap,
            CallSiteBinder callSiteBinder,
            CachedInstanceBinder cachedInstanceBinder,
            Metadata metadata)
    {
        ImmutableList.Builder<Parameter> parameters = ImmutableList.builder();
        ImmutableMap.Builder<String, ParameterAndType> parameterMapBuilder = ImmutableMap.builder();

        // First parameter is always the ConnectorSession, followed by one boxed
        // parameter per lambda argument.
        parameters.add(arg("session", ConnectorSession.class));
        for (int i = 0; i < lambdaExpression.getArguments().size(); i++) {
            Class<?> type = Primitives.wrap(lambdaExpression.getArgumentTypes().get(i).getJavaType());
            String argumentName = lambdaExpression.getArguments().get(i);
            Parameter arg = arg("lambda_" + i + "_" + BytecodeUtils.sanitizeName(argumentName), type);
            parameters.add(arg);
            parameterMapBuilder.put(argumentName, new ParameterAndType(arg, type));
        }

        RowExpressionCompiler innerExpressionCompiler = new RowExpressionCompiler(
                callSiteBinder,
                cachedInstanceBinder,
                variableReferenceCompiler(parameterMapBuilder.build()),
                metadata,
                compiledLambdaMap);

        return defineLambdaMethod(
                innerExpressionCompiler,
                classDefinition,
                methodName,
                parameters.build(),
                lambdaExpression);
    }

    /**
     * Declares the lambda body as a public method on {@code classDefinition} and
     * returns its ASM handle plus signature information.
     */
    private static CompiledLambda defineLambdaMethod(
            RowExpressionCompiler innerExpressionCompiler,
            ClassDefinition classDefinition,
            String methodName,
            List<Parameter> inputParameters,
            LambdaDefinitionExpression lambda)
    {
        // JVM limit: at most 255 method parameters including the receiver.
        checkCondition(inputParameters.size() <= 254, NOT_SUPPORTED, "Too many arguments for lambda expression");
        Class<?> returnType = Primitives.wrap(lambda.getBody().getType().getJavaType());
        MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), methodName, type(returnType), inputParameters);

        Scope scope = method.getScope();
        Variable wasNull = scope.declareVariable(boolean.class, "wasNull");
        BytecodeNode compiledBody = innerExpressionCompiler.compile(lambda.getBody(), scope);
        method.getBody()
                .putVariable(wasNull, false)
                .append(compiledBody)
                .append(boxPrimitiveIfNecessary(scope, returnType))
                .ret(returnType);

        Handle lambdaAsmHandle = new Handle(
                Opcodes.H_INVOKEVIRTUAL,
                method.getThis().getType().getClassName(),
                method.getName(),
                method.getMethodDescriptor(),
                false);

        return new CompiledLambda(
                lambdaAsmHandle,
                method.getReturnType(),
                method.getParameterTypes());
    }

    /**
     * Emits bytecode that evaluates the capture expressions and produces an
     * instance of {@code lambdaInterface} via an {@code invokedynamic} call site.
     */
    public static BytecodeNode generateLambda(
            BytecodeGeneratorContext context,
            List<RowExpression> captureExpressions,
            CompiledLambda compiledLambda,
            Class<?> lambdaInterface)
    {
        if (!lambdaInterface.isAnnotationPresent(FunctionalInterface.class)) {
            // lambdaInterface is checked to be annotated with FunctionalInterface when generating ScalarFunctionImplementation
            throw new VerifyException("lambda should be generated as class annotated with FunctionalInterface");
        }

        BytecodeBlock block = new BytecodeBlock().setDescription("Partial apply");
        Scope scope = context.getScope();

        Variable wasNull = scope.getVariable("wasNull");

        // generate values to be captured
        ImmutableList.Builder<BytecodeExpression> captureVariableBuilder = ImmutableList.builder();
        for (RowExpression captureExpression : captureExpressions) {
            Class<?> valueType = Primitives.wrap(captureExpression.getType().getJavaType());
            Variable valueVariable = scope.createTempVariable(valueType);
            block.append(context.generate(captureExpression));
            block.append(boxPrimitiveIfNecessary(scope, valueType));
            block.putVariable(valueVariable);
            // Reset wasNull after each capture so a null capture does not leak
            // into subsequent expression evaluation.
            block.append(wasNull.set(constantFalse()));
            captureVariableBuilder.add(valueVariable);
        }

        List<BytecodeExpression> captureVariables = ImmutableList.<BytecodeExpression>builder()
                .add(scope.getThis(), scope.getVariable("session"))
                .addAll(captureVariableBuilder.build())
                .build();

        Type instantiatedMethodAsmType = getMethodType(
                compiledLambda.getReturnType().getAsmType(),
                compiledLambda.getParameterTypes().stream()
                        .skip(captureExpressions.size() + 1) // skip capture variables and ConnectorSession
                        .map(ParameterizedType::getAsmType)
                        .collect(toImmutableList()).toArray(new Type[0]));

        block.append(
                invokeDynamic(
                        LAMBDA_CAPTURE_METHOD,
                        ImmutableList.of(
                                getType(getSingleApplyMethod(lambdaInterface)),
                                compiledLambda.getLambdaAsmHandle(),
                                instantiatedMethodAsmType),
                        "apply",
                        type(lambdaInterface),
                        captureVariables));
        return block;
    }

    /**
     * Compiles a standalone {@code LambdaProvider} class whose {@code getLambda()}
     * returns an instance of {@code lambdaInterface} bound to the given lambda.
     */
    public static Class<? extends LambdaProvider> compileLambdaProvider(LambdaDefinitionExpression lambdaExpression, Metadata metadata, Class<?> lambdaInterface)
    {
        ClassDefinition lambdaProviderClassDefinition = new ClassDefinition(
                a(PUBLIC, Access.FINAL),
                makeClassName("LambdaProvider"),
                type(Object.class),
                type(LambdaProvider.class));

        FieldDefinition sessionField = lambdaProviderClassDefinition.declareField(a(PRIVATE), "session", ConnectorSession.class);
        CallSiteBinder callSiteBinder = new CallSiteBinder();
        CachedInstanceBinder cachedInstanceBinder = new CachedInstanceBinder(lambdaProviderClassDefinition, callSiteBinder);
        Map<LambdaDefinitionExpression, CompiledLambda> compiledLambdaMap = generateMethodsForLambda(
                lambdaProviderClassDefinition,
                callSiteBinder,
                cachedInstanceBinder,
                lambdaExpression,
                metadata);

        MethodDefinition method = lambdaProviderClassDefinition.declareMethod(
                a(PUBLIC),
                "getLambda",
                type(Object.class),
                ImmutableList.of());

        Scope scope = method.getScope();
        BytecodeBlock body = method.getBody();

        scope.declareVariable("wasNull", body, constantFalse());
        scope.declareVariable("session", body, method.getThis().getField(sessionField));

        RowExpressionCompiler rowExpressionCompiler = new RowExpressionCompiler(
                callSiteBinder,
                cachedInstanceBinder,
                variableReferenceCompiler(ImmutableMap.of()),
                metadata,
                compiledLambdaMap);

        BytecodeGeneratorContext generatorContext = new BytecodeGeneratorContext(
                rowExpressionCompiler,
                scope,
                callSiteBinder,
                cachedInstanceBinder,
                metadata);

        body.append(
                generateLambda(
                        generatorContext,
                        ImmutableList.of(),
                        compiledLambdaMap.get(lambdaExpression),
                        lambdaInterface))
                .retObject();

        // constructor
        Parameter sessionParameter = arg("session", ConnectorSession.class);
        MethodDefinition constructorDefinition = lambdaProviderClassDefinition.declareConstructor(a(PUBLIC), sessionParameter);
        BytecodeBlock constructorBody = constructorDefinition.getBody();
        Variable constructorThisVariable = constructorDefinition.getThis();

        constructorBody.comment("super();")
                .append(constructorThisVariable)
                .invokeConstructor(Object.class)
                .append(constructorThisVariable.setField(sessionField, sessionParameter));

        cachedInstanceBinder.generateInitializations(constructorThisVariable, constructorBody);
        constructorBody.ret();

        return defineClass(lambdaProviderClassDefinition, LambdaProvider.class, callSiteBinder.getBindings(), AccumulatorCompiler.class.getClassLoader());
    }

    /**
     * Returns the single abstract {@code apply} method of the functional
     * interface, failing compilation if zero or multiple candidates exist.
     */
    private static Method getSingleApplyMethod(Class<?> lambdaFunctionInterface)
    {
        checkCondition(lambdaFunctionInterface.isAnnotationPresent(FunctionalInterface.class), COMPILER_ERROR, "Lambda function interface is required to be annotated with FunctionalInterface");

        List<Method> applyMethods = Arrays.stream(lambdaFunctionInterface.getMethods())
                .filter(method -> method.getName().equals("apply"))
                .collect(toImmutableList());

        checkCondition(applyMethods.size() == 1, COMPILER_ERROR, "Expect to have exactly 1 method with name 'apply' in interface " + lambdaFunctionInterface.getName());
        return applyMethods.get(0);
    }

    /**
     * Visitor that only resolves variable references (lambda parameters); every
     * other expression kind is unreachable inside a pre-generated lambda body.
     */
    private static RowExpressionVisitor<BytecodeNode, Scope> variableReferenceCompiler(Map<String, ParameterAndType> parameterMap)
    {
        return new RowExpressionVisitor<>()
        {
            @Override
            public BytecodeNode visitInputReference(InputReferenceExpression node, Scope scope)
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytecodeNode visitCall(CallExpression call, Scope scope)
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytecodeNode visitSpecialForm(SpecialForm specialForm, Scope context)
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytecodeNode visitConstant(ConstantExpression literal, Scope scope)
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytecodeNode visitLambda(LambdaDefinitionExpression lambda, Scope context)
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytecodeNode visitVariableReference(VariableReferenceExpression reference, Scope context)
            {
                ParameterAndType parameterAndType = parameterMap.get(reference.getName());
                Parameter parameter = parameterAndType.getParameter();
                Class<?> type = parameterAndType.getType();
                return new BytecodeBlock()
                        .append(parameter)
                        .append(unboxPrimitiveIfNecessary(context, type));
            }
        };
    }

    /**
     * Immutable description of a compiled lambda method: its ASM invocation
     * handle plus return and parameter types.
     */
    static class CompiledLambda
    {
        // lambda method information
        private final Handle lambdaAsmHandle;
        private final ParameterizedType returnType;
        private final List<ParameterizedType> parameterTypes;

        public CompiledLambda(
                Handle lambdaAsmHandle,
                ParameterizedType returnType,
                List<ParameterizedType> parameterTypes)
        {
            // FIX: null-check messages now name the actual parameters
            // (previously "lambdaMethodAsmHandle is null" / copy-pasted "returnType is null").
            this.lambdaAsmHandle = requireNonNull(lambdaAsmHandle, "lambdaAsmHandle is null");
            this.returnType = requireNonNull(returnType, "returnType is null");
            this.parameterTypes = ImmutableList.copyOf(requireNonNull(parameterTypes, "parameterTypes is null"));
        }

        public Handle getLambdaAsmHandle()
        {
            return lambdaAsmHandle;
        }

        public ParameterizedType getReturnType()
        {
            return returnType;
        }

        public List<ParameterizedType> getParameterTypes()
        {
            return parameterTypes;
        }
    }
}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hisp.dhis.webapi.controller; import static org.hisp.dhis.dxf2.webmessage.WebMessageUtils.conflict; import static org.hisp.dhis.dxf2.webmessage.WebMessageUtils.notFound; import static org.hisp.dhis.system.util.CodecUtils.filenameEncode; import static org.hisp.dhis.webapi.utils.ContextUtils.CONTENT_TYPE_JSON; import java.io.IOException; import java.util.Date; import javax.servlet.http.HttpServletResponse; import lombok.AllArgsConstructor; import lombok.NonNull; import org.hisp.dhis.category.CategoryOptionCombo; import org.hisp.dhis.category.CategoryService; import org.hisp.dhis.common.DhisApiVersion; import org.hisp.dhis.common.Grid; import org.hisp.dhis.common.cache.CacheStrategy; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementService; import org.hisp.dhis.dxf2.webmessage.WebMessageException; import org.hisp.dhis.i18n.I18nManager; import org.hisp.dhis.indicator.Indicator; import org.hisp.dhis.indicator.IndicatorService; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitService; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.render.RenderService; import org.hisp.dhis.system.grid.GridUtils; import org.hisp.dhis.system.util.CodecUtils; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.visualization.ChartService; import org.hisp.dhis.visualization.PlotData; import org.hisp.dhis.visualization.Visualization; import org.hisp.dhis.visualization.VisualizationGridService; import org.hisp.dhis.visualization.VisualizationService; import org.hisp.dhis.visualization.VisualizationType; import org.hisp.dhis.webapi.mvc.annotation.ApiVersion; import org.hisp.dhis.webapi.utils.ContextUtils; import org.jfree.chart.ChartUtils; import org.jfree.chart.JFreeChart; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import 
org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST endpoints that serve visualization data in various formats (HTML, XML,
 * PDF, XLS, CSV, PNG chart images) and chart history images.
 *
 * All collaborators are constructor-injected via Lombok's
 * {@code @AllArgsConstructor}.
 */
@RestController
@AllArgsConstructor
@ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } )
public class VisualizationDataController
{
    // FIX: declared final for consistency with every other injected field below
    // (all dependencies are constructor-injected and must not be reassigned).
    @NonNull
    private final OrganisationUnitService organisationUnitService;

    @NonNull
    private final ContextUtils contextUtils;

    @NonNull
    private final VisualizationService visualizationService;

    @NonNull
    private final VisualizationGridService visualizationGridService;

    @NonNull
    private final ChartService chartService;

    @NonNull
    private final DataElementService dataElementService;

    @NonNull
    private final CategoryService categoryService;

    @NonNull
    private final IndicatorService indicatorService;

    @NonNull
    private final I18nManager i18nManager;

    @NonNull
    private final CurrentUserService currentUserService;

    @NonNull
    private final RenderService renderService;

    /**
     * Returns the visualization data grid rendered as HTML.
     */
    @GetMapping( value = "/visualizations/{uid}/data.html" )
    public @ResponseBody Grid getVisualizationDataHtml( @PathVariable( "uid" ) String uid, Model model,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date )
    {
        return getReportTableGrid( uid, organisationUnitUid, date );
    }

    /**
     * Streams the visualization data grid as styled HTML (html+css).
     */
    @GetMapping( value = "/visualizations/{uid}/data.html+css" )
    public void getVisualizationDataHtmlCss( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date,
        HttpServletResponse response )
        throws Exception
    {
        Grid grid = getReportTableGrid( uid, organisationUnitUid, date );

        String filename = filenameEncode( grid.getTitle() ) + ".html";

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_HTML, CacheStrategy.RESPECT_SYSTEM_SETTING,
            filename, false );

        GridUtils.toHtmlCss( grid, response.getWriter() );
    }

    /**
     * Streams the visualization data grid as XML.
     */
    @GetMapping( value = "/visualizations/{uid}/data.xml" )
    public void getVisualizationDataXml( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date,
        HttpServletResponse response )
        throws Exception
    {
        Grid grid = getReportTableGrid( uid, organisationUnitUid, date );

        String filename = filenameEncode( grid.getTitle() ) + ".xml";

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_XML, CacheStrategy.RESPECT_SYSTEM_SETTING,
            filename, false );

        GridUtils.toXml( grid, response.getOutputStream() );
    }

    /**
     * Streams the visualization data grid as PDF.
     */
    @GetMapping( value = "/visualizations/{uid}/data.pdf" )
    public void getVisualizationDataPdf( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date,
        HttpServletResponse response )
        throws Exception
    {
        Grid grid = getReportTableGrid( uid, organisationUnitUid, date );

        String filename = filenameEncode( grid.getTitle() ) + ".pdf";

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_PDF, CacheStrategy.RESPECT_SYSTEM_SETTING,
            filename, false );

        GridUtils.toPdf( grid, response.getOutputStream() );
    }

    /**
     * Streams the visualization data grid as an Excel workbook (attachment).
     */
    @GetMapping( value = "/visualizations/{uid}/data.xls" )
    public void getVisualizationDataXls( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date,
        HttpServletResponse response )
        throws Exception
    {
        Grid grid = getReportTableGrid( uid, organisationUnitUid, date );

        String filename = filenameEncode( grid.getTitle() ) + ".xls";

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_EXCEL, CacheStrategy.RESPECT_SYSTEM_SETTING,
            filename, true );

        GridUtils.toXls( grid, response.getOutputStream() );
    }

    /**
     * Streams the visualization data grid as CSV (attachment).
     */
    @GetMapping( value = "/visualizations/{uid}/data.csv" )
    public void getVisualizationDataCsv( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "ou", required = false ) String organisationUnitUid,
        @RequestParam( value = "date", required = false ) Date date,
        HttpServletResponse response )
        throws Exception
    {
        Grid grid = getReportTableGrid( uid, organisationUnitUid, date );

        String filename = filenameEncode( grid.getTitle() ) + ".csv";

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_CSV, CacheStrategy.RESPECT_SYSTEM_SETTING,
            filename, true );

        GridUtils.toCsv( grid, response.getWriter() );
    }

    /**
     * Renders a chart-capable visualization as a PNG image; falls back to a JSON
     * grid for non-chart (or unsupported chart type) visualizations.
     */
    @GetMapping( value = { "/visualizations/{uid}/data", "/visualizations/{uid}/data.png" } )
    public void getVisualizationData( @PathVariable( "uid" ) String uid,
        @RequestParam( value = "date", required = false ) Date date,
        @RequestParam( value = "ou", required = false ) String ou,
        @RequestParam( value = "width", defaultValue = "800", required = false ) int width,
        @RequestParam( value = "height", defaultValue = "500", required = false ) int height,
        @RequestParam( value = "attachment", required = false ) boolean attachment,
        HttpServletResponse response )
        throws IOException,
        WebMessageException
    {
        final Visualization visualization = visualizationService.getVisualizationNoAcl( uid );

        if ( visualization == null )
        {
            throw new WebMessageException( notFound( "Visualization does not exist: " + uid ) );
        }

        if ( visualization.isChart() && isChartSupported( visualization.getType() ) )
        {
            OrganisationUnit unit = ou != null ? organisationUnitService.getOrganisationUnit( ou ) : null;

            JFreeChart jFreeChart = chartService.getJFreeChart( new PlotData( visualization ), date, unit,
                i18nManager.getI18nFormat(), currentUserService.getCurrentUser() );

            String filename = CodecUtils.filenameEncode( visualization.getName() ) + ".png";

            contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_PNG,
                CacheStrategy.RESPECT_SYSTEM_SETTING, filename, attachment );

            ChartUtils.writeChartAsPNG( response.getOutputStream(), jFreeChart, width, height );
        }
        else
        {
            response.setContentType( CONTENT_TYPE_JSON );
            renderService.toJson( response.getOutputStream(), getReportTableGrid( uid, ou, date ) );
        }
    }

    /**
     * Renders an indicator chart (by period or by organisation unit) as PNG.
     */
    @GetMapping( value = { "/visualizations/data", "/visualizations/data.png" } )
    public void getVisualizationChartData( @RequestParam( value = "in" ) String indicatorUid,
        @RequestParam( value = "ou" ) String organisationUnitUid,
        @RequestParam( value = "periods", required = false ) boolean periods,
        @RequestParam( value = "width", defaultValue = "800", required = false ) int width,
        @RequestParam( value = "height", defaultValue = "500", required = false ) int height,
        @RequestParam( value = "skipTitle", required = false ) boolean skipTitle,
        @RequestParam( value = "attachment", required = false ) boolean attachment,
        HttpServletResponse response )
        throws IOException
    {
        Indicator indicator = indicatorService.getIndicator( indicatorUid );
        OrganisationUnit unit = organisationUnitService.getOrganisationUnit( organisationUnitUid );

        JFreeChart chart;

        if ( periods )
        {
            chart = chartService.getJFreePeriodChart( indicator, unit, !skipTitle, i18nManager.getI18nFormat() );
        }
        else
        {
            chart = chartService.getJFreeOrganisationUnitChart( indicator, unit, !skipTitle,
                i18nManager.getI18nFormat() );
        }

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_PNG, CacheStrategy.RESPECT_SYSTEM_SETTING,
            "chart.png", attachment );

        ChartUtils.writeChartAsPNG( response.getOutputStream(), chart, width, height );
    }

    /**
     * Renders a 13-period history chart for a data element / category option
     * combo / period / organisation unit combination as PNG.
     *
     * @throws WebMessageException when any of the referenced objects do not exist
     */
    @GetMapping( value = { "/visualizations/history/data", "/visualizations/history/data.png" } )
    public void getVisualizationChartHistory( @RequestParam String de, @RequestParam String co,
        @RequestParam String cp, @RequestParam String pe, @RequestParam String ou,
        @RequestParam( defaultValue = "525", required = false ) int width,
        @RequestParam( defaultValue = "300", required = false ) int height,
        HttpServletResponse response )
        throws IOException,
        WebMessageException
    {
        DataElement dataElement = dataElementService.getDataElement( de );

        if ( dataElement == null )
        {
            throw new WebMessageException( conflict( "Data element does not exist: " + de ) );
        }

        CategoryOptionCombo categoryOptionCombo = categoryService.getCategoryOptionCombo( co );

        if ( categoryOptionCombo == null )
        {
            throw new WebMessageException( conflict( "Category option combo does not exist: " + co ) );
        }

        CategoryOptionCombo attributeOptionCombo = categoryService.getCategoryOptionCombo( cp );

        if ( attributeOptionCombo == null )
        {
            throw new WebMessageException( conflict( "Category option combo does not exist: " + cp ) );
        }

        Period period = PeriodType.getPeriodFromIsoString( pe );

        if ( period == null )
        {
            throw new WebMessageException( conflict( "Period does not exist: " + pe ) );
        }

        OrganisationUnit organisationUnit = organisationUnitService.getOrganisationUnit( ou );

        if ( organisationUnit == null )
        {
            throw new WebMessageException( conflict( "Organisation unit does not exist: " + ou ) );
        }

        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_PNG, CacheStrategy.RESPECT_SYSTEM_SETTING,
            "chart.png", false );

        JFreeChart chart = chartService.getJFreeChartHistory( dataElement, categoryOptionCombo, attributeOptionCombo,
            period, organisationUnit, 13, i18nManager.getI18nFormat() );

        ChartUtils.writeChartAsPNG( response.getOutputStream(), chart, width, height );
    }

    /**
     * Resolves the grid for a visualization, defaulting the organisation unit to
     * the first root unit when the visualization has an org-unit reporting param,
     * and the date to now.
     */
    private Grid getReportTableGrid( String uid, String organisationUnitUid, Date date )
    {
        // NOTE(review): getVisualizationNoAcl may return null for an unknown uid
        // (see getVisualizationData above); callers of this helper would then hit
        // an NPE on hasReportingParams() — TODO confirm and add an explicit check.
        Visualization visualization = visualizationService.getVisualizationNoAcl( uid );

        if ( organisationUnitUid == null && visualization.hasReportingParams()
            && visualization.getReportingParams().isOrganisationUnitSet() )
        {
            organisationUnitUid = organisationUnitService.getRootOrganisationUnits().iterator().next().getUid();
        }

        date = date != null ? date : new Date();

        return visualizationGridService.getVisualizationGrid( uid, date, organisationUnitUid );
    }

    /**
     * Returns true for visualization types that can be rendered as a JFreeChart.
     */
    private boolean isChartSupported( final VisualizationType type )
    {
        return type == VisualizationType.LINE ||
            type == VisualizationType.COLUMN ||
            type == VisualizationType.BAR ||
            type == VisualizationType.AREA ||
            type == VisualizationType.PIE ||
            type == VisualizationType.STACKED_COLUMN ||
            type == VisualizationType.STACKED_BAR ||
            type == VisualizationType.RADAR ||
            type == VisualizationType.GAUGE;
    }
}
/* * Copyright (c) 2009-2013, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package gov.hhs.fha.nhinc.callback.cxf;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import gov.hhs.fha.nhinc.callback.openSAML.OpensamlObjectBuilderUtil;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;

import org.junit.Test;
import org.opensaml.saml2.core.Assertion;
import org.opensaml.saml2.core.Issuer;
import org.opensaml.saml2.core.NameID;
import org.opensaml.saml2.core.Subject;
import org.opensaml.xml.ConfigurationException;
import org.opensaml.xml.validation.ValidationException;

/**
 * The Class Saml2ExchangeAuthFrameworkValidatorTest.
 *
 * Unit tests for {@code Saml2ExchangeAuthFrameworkValidator} covering subject
 * and issuer NameID format/value validation (DIL test cases noted per method).
 *
 * @author msw
 */
public class Saml2ExchangeAuthFrameworkValidatorTest {

    /**
     * Test validate happy path: subject and issuer both use the X509 format with
     * a valid X509 subject name value.
     *
     * @throws ValidationException the validation exception
     */
    @Test
    public void testValidate() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = mock(Issuer.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(name.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);
        when(assertion.getIssuer()).thenReturn(issuer);
        when(issuer.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(issuer.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);

        validator.validate(assertion);
    }

    /**
     * Test validate a different happy path: subject and issuer both use the
     * email-address format with a valid email value.
     *
     * @throws ValidationException the validation exception
     */
    @Test
    public void testValidate2() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = mock(Issuer.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS);
        when(name.getValue()).thenReturn("example@example.org");
        when(assertion.getIssuer()).thenReturn(issuer);
        when(issuer.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS);
        when(issuer.getValue()).thenReturn("example@example.org");

        validator.validate(assertion);
    }

    /**
     * Test validate no name subject. This tests DIL test case 3.421.
     *
     * @throws ValidationException the validation exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateNoNameSubject() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);

        // Subject mock returns a null NameID by default, which must fail validation.
        when(assertion.getSubject()).thenReturn(subject);

        validator.validate(assertion);
    }

    /**
     * Test validate subject name format is not valid. This tests DIL test case 3.422.
     *
     * @throws ValidationException the validation exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateSubjectWrongFormat() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn("wrong value");

        validator.validate(assertion);
    }

    /**
     * Test validate no name issuer format. This tests DIL test case 3.410.
     *
     * @throws ValidationException the validation exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateIssuerNoFormat() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = mock(Issuer.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS);
        when(assertion.getIssuer()).thenReturn(issuer);
        // Issuer format is explicitly null, which must fail validation.
        when(issuer.getFormat()).thenReturn(null);

        validator.validate(assertion);
    }

    /**
     * Test validate issuer is not a valid Email Address. This tests DIL test case 3.411.
     *
     * @throws ValidationException the validation exception
     * @throws ConfigurationException the configuration exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateIssuerNotEmailAddress() throws ValidationException, ConfigurationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = generateIssuer(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS,
                "this is obviously not an email address....okkk?");

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS);
        when(assertion.getIssuer()).thenReturn(issuer);

        validator.validate(assertion);
    }

    /**
     * Test validate issuer is not a valid X509 Subject Name. This tests DIL test case 3.412.
     *
     * @throws ValidationException the validation exception
     * @throws ConfigurationException the configuration exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateIssuerNotX509SubjectName() throws ValidationException, ConfigurationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = generateIssuer(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509,
                "this is obviously not an x509 subject name....okkk?");

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(assertion.getIssuer()).thenReturn(issuer);

        validator.validate(assertion);
    }

    /**
     * Test validate issuer is not a valid Windows Domain Name. This tests DIL test case 3.413.
     *
     * NOTE(review): the subject name format is stubbed as X509 here while the
     * issuer uses the Windows-name format — presumably intentional so only the
     * issuer value triggers the failure; confirm against the validator logic.
     *
     * @throws ValidationException the validation exception
     * @throws ConfigurationException the configuration exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateIssuerNotWindowsDomainName() throws ValidationException, ConfigurationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = generateIssuer(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_WINDOWS_NAME,
                "this is obviously not an windows domain name....okkk?");

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(assertion.getIssuer()).thenReturn(issuer);

        validator.validate(assertion);
    }

    /**
     * Test validate fails when the subject uses the X509 format with a value
     * that is not a valid X509 subject name.
     * (Javadoc previously said "happy path", but this test expects a
     * ValidationException.)
     *
     * @throws ValidationException the validation exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateSubjectX509InvalidValue() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = mock(Issuer.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(name.getValue()).thenReturn("not a valid X509 name.");
        when(assertion.getIssuer()).thenReturn(issuer);
        when(issuer.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(issuer.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);

        validator.validate(assertion);
    }

    /**
     * Test validate fails when the subject uses the email-address format with a
     * value that is not a valid email address.
     * (Javadoc previously said "happy path", but this test expects a
     * ValidationException.)
     *
     * @throws ValidationException the validation exception
     */
    @Test(expected = ValidationException.class)
    public void testValidateSubjectEmailInvalidValue() throws ValidationException {
        Saml2ExchangeAuthFrameworkValidator validator = new Saml2ExchangeAuthFrameworkValidator();
        Assertion assertion = mock(Assertion.class);
        Subject subject = mock(Subject.class);
        NameID name = mock(NameID.class);
        Issuer issuer = mock(Issuer.class);

        when(assertion.getSubject()).thenReturn(subject);
        when(subject.getNameID()).thenReturn(name);
        when(name.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_EMAIL_ADDRESS);
        when(name.getValue()).thenReturn("not a valid email address.");
        when(assertion.getIssuer()).thenReturn(issuer);
        when(issuer.getFormat()).thenReturn(NhincConstants.AUTH_FRWK_NAME_ID_FORMAT_X509);
        when(issuer.getValue()).thenReturn(NhincConstants.SAML_DEFAULT_ISSUER_NAME);

        validator.validate(assertion);
    }

    /**
     * Generate issuer.
     *
     * Builds a real (non-mock) OpenSAML Issuer with the given format and value.
     *
     * @param format the format
     * @param value the value
     * @return the issuer
     * @throws ConfigurationException the configuration exception
     */
    protected Issuer generateIssuer(String format, String value) throws ConfigurationException {
        OpensamlObjectBuilderUtil util = new OpensamlObjectBuilderUtil();
        Issuer issuer = (Issuer) util.createOpenSAMLObject(Issuer.DEFAULT_ELEMENT_NAME);
        issuer.setFormat(format);
        issuer.setValue(value);
        return issuer;
    }
}
package Data;

import java.sql.*;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.StringTokenizer;

/**
 * Central SQL gateway: every query the application issues goes through this class.
 *
 * <p>All statements are now executed through {@link PreparedStatement} with bound
 * parameters, so caller-supplied values (usernames, passwords, locations, dates...)
 * can never alter the query structure. The previous implementation concatenated raw
 * input into SQL strings, which was an injection hole in every method.
 *
 * <p>NOTE(review): the class keeps one shared static connection/statement, so it is
 * not thread-safe — unchanged from the original design.
 */
public class QueryAdaptor {

    /** Shared connection, opened by {@link #connect()}. */
    private static Connection connect = null;
    /** Most recently executed statement; closed by the next query or by {@link #close()}. */
    private static Statement statement = null;
    /** Most recently produced result set (owned by {@code statement}). */
    private static ResultSet resultSet = null;

    /**
     * Tells whether a user with the given username exists.
     *
     * @param usn the username to look up
     * @return true when a matching user row exists
     */
    public static boolean findUser(String usn) throws Exception {
        return query("SELECT Username FROM user WHERE Username = ?", usn).first();
    }

    /**
     * Tells whether the given username/password pair matches a user row.
     * NOTE(review): passwords appear to be stored and compared in plain text —
     * confirm, and consider hashing them.
     *
     * @param usn the username
     * @param pwd the password
     * @return true when the pair matches a row
     */
    public static boolean matchPassword(String usn, String pwd) throws Exception {
        return query("SELECT Username,Password FROM user WHERE Username = ? AND Password = ?",
            usn, pwd).first();
    }

    /**
     * Fetches the member row for the given username.
     *
     * @return an array of 9 column values; slots with no data stay " " (single
     *         space), matching the original contract. Previously this method threw
     *         when the member did not exist; now it returns the blank array.
     */
    public static String[] getMemberInfo(String usn) throws Exception {
        String[] ret = new String[9];
        Arrays.fill(ret, " ");
        ResultSet rs = query("SELECT * FROM member WHERE Username = ?", usn);
        if (rs.first()) {
            int cols = rs.getMetaData().getColumnCount();
            for (int c = 1; c <= cols; c++) {
                ret[c - 1] = rs.getString(c);
            }
        }
        return ret;
    }

    /**
     * Maps a username to its account type.
     * '1' = employee, '2' = admin, '3' = member.
     */
    public static char getMemberType(String usn) throws Exception {
        ResultSet rs = query(
            "SELECT GTCR_Employee_Flag,GTCR_Admin_Flag,GTCR_Member FROM user WHERE Username = ?", usn);
        rs.first(); // a username maps to exactly one row
        if (rs.getInt(1) == 1) {
            return '1';
        } else if (rs.getInt(2) == 1) {
            return '2';
        }
        return '3';
    }

    /**
     * Tells whether the user has a member row (selecting its CardNo column).
     * NOTE(review): this only checks that the member row exists — it does not check
     * that CardNo is non-null. Confirm that is intended; behavior preserved as-is.
     */
    public static boolean hasCard(String UserName) throws Exception {
        return query("SELECT CardNo FROM member WHERE Username = ?", UserName).first();
    }

    /**
     * Tells whether a car model already exists at a location
     * (if true it cannot be added there again).
     */
    public static boolean carExists(String loc, String carModel) throws Exception {
        return query("SELECT * FROM car WHERE CarLocation = ? AND CarModel = ?", loc, carModel).first();
    }

    /**
     * Fetches credit-card details for a card number.
     *
     * @return {Name, CVV, ExpiryDate, BillingAdd}
     */
    public static String[] pullCardInfo(String CardNo) throws Exception {
        ResultSet rs = query("Select Name,CVV,ExpiryDate,BillingAdd FROM creditcard WHERE CardNo = ?", CardNo);
        rs.first();
        int cols = rs.getMetaData().getColumnCount();
        String[] ret = new String[cols];
        for (int c = 1; c <= cols; c++) {
            ret[c - 1] = rs.getString(c);
        }
        return ret;
    }

    /** @return all location names. */
    public static String[] getLocationName() throws Exception {
        return collectStrings(query("SELECT LocationName FROM location"));
    }

    /** @return every Type value in the car table (duplicates included, as before). */
    public static String[] getCarTypeList() throws Exception {
        return collectStrings(query("SELECT Type FROM car"));
    }

    /**
     * @return the distinct car types available for the given location and model
     */
    public static String[] getCarTypeList(String loc, String carModel) throws Exception {
        return collectStrings(query(
            "SELECT DISTINCT Type FROM car WHERE CarLocation = ? AND CarModel = ?", loc, carModel));
    }

    /** @return all distinct car models in the car table. */
    public static String[] getCarList() throws Exception {
        return collectStrings(query("SELECT DISTINCT CarModel FROM car"));
    }

    /** @return the distinct car models at a specific location. */
    public static String[] getCarList(String loc) throws Exception {
        return collectStrings(query("SELECT DISTINCT CarModel FROM car WHERE CarLocation = ?", loc));
    }

    /**
     * Lists the cars at the requested location/model that have no overlapping
     * reservation in the requested window, augmented with discounted hourly rates
     * and a total-cost estimate for the member's driving plan.
     *
     * @param username member whose driving plan prices the estimate
     * @param type currently unused filter, kept for interface compatibility
     * @return one inner array per car — 11 car columns in positions 0-5 and 8-12,
     *         discounted rates in 6-7, cost estimate last; empty when nothing is
     *         available (the original crashed on an empty result)
     */
    public static String[][] getCarAvailabilityList(String username, String loc, String model, String type,
            String startDate, String startTime, String returnDate, String returnTime) throws Exception {
        String pickup = startDate + " " + startTime;
        String dropoff = returnDate + " " + returnTime;
        String[][] cars = toMatrix(query(
            "SELECT DISTINCT c.VehicleSno, CarModel, Type, CarLocation, Color, HourlyRate, DailyRate, "
          + "Seating_Capacity, Transmission_Type, BluetoothConnectivity, AuxiliaryCable "
          + "FROM car c "
          + "WHERE c.VehicleSno NOT IN (SELECT r.VehicleSno FROM reservation r "
          + "WHERE ((r.PickUpDateTime BETWEEN ? AND ?) OR (r.ReturnDateTime BETWEEN ? AND ?))) "
          + "AND c.CarLocation = ? "
          + "AND c.CarModel = ?",
            pickup, dropoff, pickup, dropoff, loc, model));
        // false: this variant historically formatted rates with double math.
        return buildAvailabilityMatrix(cars, username, startDate, startTime, returnDate, returnTime, false);
    }

    /**
     * Same as {@link #getCarAvailabilityList} but lists available cars at OTHER
     * locations or of OTHER models (fallback suggestions).
     */
    public static String[][] getOtherCarAvailabilityList(String username, String loc, String model, String type,
            String startDate, String startTime, String returnDate, String returnTime) throws Exception {
        String pickup = startDate + " " + startTime;
        String dropoff = returnDate + " " + returnTime;
        String[][] cars = toMatrix(query(
            "SELECT DISTINCT c.VehicleSno, CarModel, Type, CarLocation, Color, HourlyRate, DailyRate, "
          + "Seating_Capacity, Transmission_Type, BluetoothConnectivity, AuxiliaryCable "
          + "FROM car c "
          + "WHERE c.VehicleSno NOT IN (SELECT r.VehicleSno FROM reservation r "
          + "WHERE ((r.PickUpDateTime BETWEEN ? AND ?) OR (r.ReturnDateTime BETWEEN ? AND ?))) "
          + "AND (c.CarLocation <> ? OR c.CarModel <> ?)",
            pickup, dropoff, pickup, dropoff, loc, model));
        // true: this variant historically formatted rates with float math.
        return buildAvailabilityMatrix(cars, username, startDate, startTime, returnDate, returnTime, true);
    }

    /**
     * Finds a VehicleSno from a location and model name.
     */
    public static String findVsn(String loc, String ModelName) throws Exception {
        ResultSet rs = query(
            "SELECT DISTINCT VehicleSno FROM car WHERE CarLocation = ? AND CarModel = ?", loc, ModelName);
        rs.first();
        return rs.getString(1);
    }

    /** Tells whether the user has any reservation row at all. */
    public static boolean hasReservation(String usn) throws Exception {
        return query("SELECT * FROM reservation WHERE Username = ?", usn).first();
    }

    /**
     * Fetches the user's currently active rental (pickup in the past, return later
     * than two hours ago). Only the first matching reservation is returned, as in
     * the original.
     *
     * @return {CarModel, CarLocation, ReturnDateTime}, or null when none is active
     */
    public static String[] getReservationInfo(String usn) throws Exception {
        ResultSet rs = query("SELECT CarModel, CarLocation, ReturnDateTime "
            + "FROM reservation r, car c "
            + "WHERE r.VehicleSno = c.VehicleSno "
            + "AND r.Username = ? "
            + "AND r.PickupDateTime<NOW() "
            + "AND r.ReturnDateTime>ADDTIME(NOW(),'-02:00:00')", usn);
        if (!rs.first()) {
            return null;
        }
        int cols = rs.getMetaData().getColumnCount();
        String[] ret = new String[cols];
        for (int c = 1; c <= cols; c++) {
            ret[c - 1] = rs.getString(c);
        }
        return ret;
    }

    /**
     * Current reservations of a user, formatted for the Rental Info screen
     * (column 1 becomes "PickUp-Return").
     *
     * @return 2-D array of reservations, or null when there are none
     */
    public static String[][] getCurrReservationInfo(String usn) throws Exception {
        return formatReservationRows(query(
            "SELECT r.ReturnDateTime, r.PickUpDateTime, c.CarModel, r.ReservationLocation, r.EstimatedCost, r.ResID "
            + "FROM reservation r, car c "
            + "WHERE r.VehicleSno = c.VehicleSno "
            + "AND r.Username = ? "
            + "AND r.PickupDateTime<NOW() "
            + "AND r.ReturnDateTime>ADDTIME(NOW(),'-02:00:00')", usn));
    }

    /**
     * Past reservations of a user, formatted for the Rental Info screen
     * (column 1 becomes "PickUp-Return").
     *
     * @return 2-D array of reservations, or null when there are none
     */
    public static String[][] getPastReservationInfo(String usn) throws Exception {
        return formatReservationRows(query(
            "SELECT r.ReturnDateTime, r.PickUpDateTime, c.CarModel, r.ReservationLocation, r.EstimatedCost, r.ResID "
            + "FROM reservation r, car c "
            + "WHERE r.VehicleSno = c.VehicleSno "
            + "AND r.Username = ? "
            + "AND r.ReturnDateTime<ADDTIME(NOW(),'-02:00:00')", usn));
    }

    /** Revenue (estimated cost + late fees) per vehicle for 2013 Q1. */
    public static String[][] getRevenueGenerated() throws Exception {
        return toMatrix(query(
            "SELECT reservation.VehicleSno, car.type, car.CarModel, SUM(EstimatedCost) , SUM(LateFees) "
            + "FROM reservation, car "
            + "WHERE car.VehicleSno = reservation.VehicleSno "
            + "AND PickUpDateTime >= ? AND PickUpDateTime <= ? "
            + "GROUP BY reservation.VehicleSno",
            "2013-01-01", "2013-03-31"));
    }

    /** Maintenance history: one row per (car, request, problem). */
    public static String[][] getMaintenanceHistoryReport() throws Exception {
        return toMatrix(query(
            "SELECT CarModel, maintenance_request.RequestDateTime, maintenance_request.Username, "
            + "maintenance_request_problems.Problem "
            + "FROM maintenance_request, car, maintenance_request_problems "
            + "WHERE maintenance_request_problems.VehicleSno = maintenance_request.VehicleSno "
            + "AND maintenance_request.RequestDateTime = maintenance_request_problems.RequestDateTime "
            + "AND car.VehicleSno = maintenance_request.VehicleSno "
            + "ORDER BY maintenance_request_problems.`VehicleSno` , maintenance_request_problems.Problem"));
    }

    /** Top five members by reservations per month over 2013 Q1. */
    public static String[][] getFrequentUsersReport() throws Exception {
        return toMatrix(query(
            "Select member.Username, member.DrivingPlan, COUNT(ResID)/3 AS NoResPerMonth "
            + "FROM member, reservation "
            + "WHERE member.Username = reservation.Username "
            + "AND PickUpDateTime >= ? "
            + "AND PickUpDateTime <= ? "
            + "GROUP BY member.Username "
            + "ORDER BY NoResPerMonth DESC "
            + "LIMIT 5",
            "2013-01-01", "2013-03-31"));
    }

    /** Per-month reservation counts and rented hours over 2013 Q1. */
    public static String[][] getLocationPreferenceReport() throws Exception {
        return toMatrix(query(
            "SELECT MONTH( `PickUpDateTime` ) AS MONTH , `ReservationLocation` , COUNT( * ) , "
            + "SUM( HOUR( TIMEDIFF( `ReturnDateTime` , `PickUpDateTime` ) ) ) "
            + "FROM reservation "
            + "WHERE PickUpDateTime >= ? "
            + "AND PickUpDateTime <= ? "
            + "GROUP BY MONTH "
            + "ORDER BY MONTH",
            "2013-01-01", "2013-03-31"));
    }

    /**
     * @return the ResID of the user's reservation of the given vehicle, or "" when
     *         none exists
     */
    public static String getResID(String usn, String vsn) throws Exception {
        ResultSet rs = query(
            "SELECT ResID FROM reservation WHERE Username = ? AND VehicleSno = ?", usn, vsn);
        if (!rs.first()) {
            return "";
        }
        return rs.getString(1);
    }

    /**
     * Finds the first OTHER member whose reservation of the given vehicle starts
     * before the given time (i.e. who is affected by a late return / maintenance).
     *
     * @return {Username, PickupDateTime, ReturnDateTime, EmailAddress, PhoneNo},
     *         or null when nobody is affected
     */
    public static String[] getUserAffected(String vsn, String urt, String name) throws Exception {
        ResultSet rs = query("SELECT m.Username, r.PickupDateTime, r.ReturnDateTime, m.EmailAddress, m.PhoneNo "
            + "FROM reservation r, member m "
            + "WHERE r.Username = m.Username "
            + "AND r.PickupDateTime < ? "
            + "AND r.Username <> ? "
            + "AND r.VehicleSno = ?", urt, name, vsn);
        if (!rs.first()) {
            return null;
        }
        int cols = rs.getMetaData().getColumnCount();
        String[] ret = new String[cols];
        for (int c = 1; c <= cols; c++) {
            ret[c - 1] = rs.getString(c);
        }
        return ret;
    }

    /* ---------------- back-end data-access plumbing ---------------- */

    /**
     * Opens the shared connection to the MySQL server.
     * NOTE(review): credentials are hard-coded in source (preserved verbatim) —
     * they should be moved to configuration outside version control.
     */
    public static void connect() {
        try {
            Class.forName("com.mysql.jdbc.Driver").newInstance();
            connect = DriverManager.getConnection(
                "jdbc:mysql://academic-mysql.cc.gatech.edu/cs4400_Group_32",
                "cs4400_Group_32", "6ug5mecJ");
            if (!connect.isClosed()) {
                System.out.println("Connected To Database....");
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Executes a parameterized SELECT. Every '?' placeholder in sql is bound, in
     * order, to the matching params value, so caller input can never change the
     * query structure. The previous statement is closed first so cursors no longer
     * leak between queries.
     *
     * @param sql SELECT text with '?' placeholders
     * @param params values bound as strings, in placeholder order
     */
    private static ResultSet query(String sql, String... params) throws SQLException {
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ignored) {
                // best effort — a failed close must not block the next query
            }
        }
        // Scrollable result set: callers rely on ResultSet.first().
        PreparedStatement ps = connect.prepareStatement(sql,
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        for (int i = 0; i < params.length; i++) {
            ps.setString(i + 1, params[i]);
        }
        statement = ps;
        resultSet = ps.executeQuery();
        return resultSet;
    }

    /** Reads every column of every remaining row into one flat String[] (row-major). */
    private static String[] collectStrings(ResultSet rs) throws SQLException {
        LinkedList<String> store = new LinkedList<String>();
        int cols = rs.getMetaData().getColumnCount();
        while (rs.next()) {
            for (int c = 1; c <= cols; c++) {
                store.add(rs.getString(c));
            }
        }
        return store.toArray(new String[store.size()]);
    }

    /** Reads every remaining row into a String[row][column] matrix. */
    private static String[][] toMatrix(ResultSet rs) throws SQLException {
        LinkedList<String[]> rows = new LinkedList<String[]>();
        int cols = rs.getMetaData().getColumnCount();
        while (rs.next()) {
            String[] row = new String[cols];
            for (int c = 1; c <= cols; c++) {
                row[c - 1] = rs.getString(c);
            }
            rows.add(row);
        }
        return rows.toArray(new String[rows.size()][]);
    }

    /**
     * Formats reservation rows for the rental-info screens: column 1 becomes
     * "PickUpDateTime-ReturnDateTime" (shared by current and past reports).
     *
     * @return null when the result set is empty (callers treat null as "none")
     */
    private static String[][] formatReservationRows(ResultSet rs) throws SQLException {
        if (!rs.first()) {
            return null;
        }
        int cols = rs.getMetaData().getColumnCount();
        LinkedList<String[]> rows = new LinkedList<String[]>();
        do {
            String[] row = new String[cols];
            for (int c = 1; c <= cols; c++) {
                row[c - 1] = rs.getString(c);
            }
            row[1] = row[1] + "-" + row[0]; // show the rental window as "pickup-return"
            rows.add(row);
        } while (rs.next());
        return rows.toArray(new String[rows.size()][]);
    }

    /**
     * Builds the availability matrix shared by the two availability queries.
     * Columns: 0-5 car columns, 6-7 discounted hourly rates (15% / 10% of
     * column 5), 8-12 the last six car columns minus one, last = cost estimate for
     * the rental window. The unused drivingplan-table fetch of the original
     * (populated an array that was never read) has been dropped.
     *
     * @param useFloatRates the "other cars" variant historically formatted the
     *        discounted rates with float math; preserved for output compatibility
     */
    private static String[][] buildAvailabilityMatrix(String[][] cars, String username,
            String startDate, String startTime, String returnDate, String returnTime,
            boolean useFloatRates) throws Exception {
        if (cars.length == 0) {
            return new String[0][]; // original threw IndexOutOfBounds here
        }
        ResultSet planRs = query("SELECT DrivingPlan FROM member WHERE Username = ?", username);
        planRs.first();
        // Only the first word of the stored plan is significant.
        String planName = new StringTokenizer(planRs.getString(1)).nextToken();
        int planID;
        if (planName.equals("Occasional")) {
            planID = 0;
        } else if (planName.equals("Frequent")) {
            planID = 1;
        } else {
            planID = 2;
        }
        int width = cars[0].length + 3;
        String[][] ret = new String[cars.length][width];
        for (int r = 0; r < ret.length; r++) {
            for (int j = 0; j < 6; j++) {
                ret[r][j] = cars[r][j];
            }
            if (useFloatRates) {
                ret[r][6] = Float.toString((float) .15 * Float.parseFloat(cars[r][5]));
                ret[r][7] = Float.toString((float) .1 * Float.parseFloat(cars[r][5]));
            } else {
                ret[r][6] = Double.toString(.15 * Double.parseDouble(cars[r][5]));
                ret[r][7] = Double.toString(.1 * Double.parseDouble(cars[r][5]));
            }
            for (int j = 0; j < 5; j++) {
                ret[r][8 + j] = cars[r][cars[r].length - 6 + j];
            }
        }
        int totHours = Util.getDifference(returnDate, startDate, returnTime, startTime);
        for (int r = 0; r < ret.length; r++) {
            // Whole days at the column-8 rate, remainder hours at the plan-indexed rate.
            ret[r][width - 1] = "" + ((totHours / 24) * Double.parseDouble(ret[r][8])
                + (totHours % 24) * Double.parseDouble(ret[r][5 + planID]));
        }
        return ret;
    }

    /**
     * Closes the shared statement and connection. Each resource is closed
     * independently and null-guarded — the original NPE'd when the statement was
     * null and skipped the connection close when the statement close failed.
     */
    public static void close() {
        try {
            if (statement != null) {
                statement.close(); // also closes its open ResultSet
            }
        } catch (SQLException ignored) {
            // nothing sensible to do at shutdown
        }
        try {
            if (connect != null) {
                connect.close();
                System.out.println("Close Database");
            }
        } catch (SQLException ignored) {
            // nothing sensible to do at shutdown
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.marshal;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.cassandra.cql3.Term;
import org.apache.cassandra.serializers.TypeSerializer;
import org.apache.cassandra.serializers.BytesSerializer;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * A class avoiding class duplication between CompositeType and
 * DynamicCompositeType.
 * Those two differ only in that for DynamicCompositeType, the comparators
 * are in the encoded column name at the front of each component.
 *
 * Wire format handled here (per component): [optional type header]
 * [2-byte length] [value bytes] [1 end-of-component byte]. A zero
 * end-of-component byte means "more components may follow"; a non-zero one
 * terminates the name (see getString/fromString for its textual rendering).
 */
public abstract class AbstractCompositeType extends AbstractType<ByteBuffer>
{
    /**
     * Compares two serialized composite names component by component.
     * Empty buffers sort first; a static composite (as reported by
     * readIsStatic) sorts before a non-static one; then each component is
     * compared by its own comparator, end-of-component bytes break ties,
     * and a strict prefix sorts before its extension.
     */
    public int compare(ByteBuffer o1, ByteBuffer o2)
    {
        // Empty names: empty sorts before non-empty, two empties are equal.
        if (!o1.hasRemaining() || !o2.hasRemaining())
            return o1.hasRemaining() ? 1 : o2.hasRemaining() ? -1 : 0;

        // Duplicate so the caller's buffer positions are untouched.
        ByteBuffer bb1 = o1.duplicate();
        ByteBuffer bb2 = o2.duplicate();

        boolean isStatic1 = readIsStatic(bb1);
        boolean isStatic2 = readIsStatic(bb2);
        if (isStatic1 != isStatic2)
            return isStatic1 ? -1 : 1;

        int i = 0;
        ByteBuffer previous = null;
        while (bb1.remaining() > 0 && bb2.remaining() > 0)
        {
            AbstractType<?> comparator = getComparator(i, bb1, bb2);

            ByteBuffer value1 = ByteBufferUtil.readBytesWithShortLength(bb1);
            ByteBuffer value2 = ByteBufferUtil.readBytesWithShortLength(bb2);

            int cmp = comparator.compareCollectionMembers(value1, value2, previous);
            if (cmp != 0)
                return cmp;

            previous = value1;

            // Unequal end-of-component bytes decide the ordering.
            byte b1 = bb1.get();
            byte b2 = bb2.get();
            if (b1 != b2)
                return b1 - b2;

            ++i;
        }

        if (bb1.remaining() == 0)
            return bb2.remaining() == 0 ? 0 : -1;

        // bb1.remaining() > 0 && bb2.remaining() == 0
        return 1;
    }

    // Check if the provided BB represents a static name and advance the
    // buffer to the real beginning if so.
    protected abstract boolean readIsStatic(ByteBuffer bb);

    /**
     * Splits a composite column name into its components.
     */
    public ByteBuffer[] split(ByteBuffer name)
    {
        List<ByteBuffer> l = new ArrayList<ByteBuffer>();
        ByteBuffer bb = name.duplicate();
        readIsStatic(bb);
        int i = 0;
        while (bb.remaining() > 0)
        {
            getComparator(i++, bb); // consume the per-component type header, if any
            l.add(ByteBufferUtil.readBytesWithShortLength(bb));
            bb.get(); // skip end-of-component
        }
        return l.toArray(new ByteBuffer[l.size()]);
    }

    /*
     * Escapes all occurrences of the ':' character from the input, replacing them by "\:".
     * Furthermore, if the last character is '\' or '!', a '!' is appended
     * (so that unescape() can tell a literal trailing '\'/'!' apart).
     */
    public static String escape(String input)
    {
        if (input.isEmpty())
            return input;

        String res = input.replaceAll(":", "\\\\:");
        char last = res.charAt(res.length() - 1);
        return last == '\\' || last == '!' ? res + '!' : res;
    }

    /*
     * Reverses the effect of escape().
     * Replaces all occurrences of "\:" by ":" and removes the last character if it is '!'.
     */
    static String unescape(String input)
    {
        if (input.isEmpty())
            return input;

        String res = input.replaceAll("\\\\:", ":");
        char last = res.charAt(res.length() - 1);
        return last == '!' ? res.substring(0, res.length() - 1) : res;
    }

    /*
     * Split the input on character ':', unless the previous character is '\'.
     */
    static List<String> split(String input)
    {
        if (input.isEmpty())
            return Collections.<String>emptyList();

        List<String> res = new ArrayList<String>();
        int prev = 0;
        for (int i = 0; i < input.length(); i++)
        {
            // Only an unescaped ':' is a separator.
            if (input.charAt(i) != ':' || (i > 0 && input.charAt(i-1) == '\\'))
                continue;

            res.add(input.substring(prev, i));
            prev = i + 1;
        }
        res.add(input.substring(prev, input.length()));
        return res;
    }

    /**
     * Renders the composite as colon-separated, escaped component strings.
     * A non-zero end-of-component byte terminates the walk and is rendered
     * as ":_" (negative byte) or ":!" (positive byte).
     */
    public String getString(ByteBuffer bytes)
    {
        StringBuilder sb = new StringBuilder();
        ByteBuffer bb = bytes.duplicate();
        readIsStatic(bb);

        int i = 0;
        while (bb.remaining() > 0)
        {
            // Separator before every component except the first.
            if (bb.remaining() != bytes.remaining())
                sb.append(":");

            AbstractType<?> comparator = getAndAppendComparator(i, bb, sb);
            ByteBuffer value = ByteBufferUtil.readBytesWithShortLength(bb);

            sb.append(escape(comparator.getString(value)));

            byte b = bb.get();
            if (b != 0)
            {
                sb.append(b < 0 ? ":_" : ":!");
                break;
            }
            ++i;
        }
        return sb.toString();
    }

    /**
     * Parses the textual form produced by getString() back into the binary
     * composite format. A trailing "!" or "_" part sets the final
     * end-of-component byte to 1 or -1 respectively.
     */
    public ByteBuffer fromString(String source)
    {
        List<String> parts = split(source);
        List<ByteBuffer> components = new ArrayList<ByteBuffer>(parts.size());
        List<ParsedComparator> comparators = new ArrayList<ParsedComparator>(parts.size());
        int totalLength = 0, i = 0;
        boolean lastByteIsOne = false;
        boolean lastByteIsMinusOne = false;

        for (String part : parts)
        {
            if (part.equals("!"))
            {
                lastByteIsOne = true;
                break;
            }
            else if (part.equals("_"))
            {
                lastByteIsMinusOne = true;
                break;
            }

            ParsedComparator p = parseComparator(i, part);
            AbstractType<?> type = p.getAbstractType();
            part = p.getRemainingPart();

            ByteBuffer component = type.fromString(unescape(part));
            // type header + 2-byte length + value + end-of-component byte
            totalLength += p.getComparatorSerializedSize() + 2 + component.remaining() + 1;
            components.add(component);
            comparators.add(p);
            ++i;
        }

        ByteBuffer bb = ByteBuffer.allocate(totalLength);
        i = 0;
        for (ByteBuffer component : components)
        {
            comparators.get(i).serializeComparator(bb);
            ByteBufferUtil.writeShortLength(bb, component.remaining());
            bb.put(component); // it's ok to consume component as we won't use it anymore
            bb.put((byte)0);
            ++i;
        }
        // Patch the final end-of-component byte when the source ended in "!"/"_".
        if (lastByteIsOne)
            bb.put(bb.limit() - 1, (byte)1);
        else if (lastByteIsMinusOne)
            bb.put(bb.limit() - 1, (byte)-1);

        bb.rewind();
        return bb;
    }

    @Override
    public Term fromJSONObject(Object parsed)
    {
        // JSON support is not provided for composites.
        throw new UnsupportedOperationException();
    }

    @Override
    public String toJSONString(ByteBuffer buffer, int protocolVersion)
    {
        // JSON support is not provided for composites.
        throw new UnsupportedOperationException();
    }

    /**
     * Validates the structure of a serialized composite: each component must
     * carry a complete 2-byte length, a value of that length (valid for its
     * comparator), and an end-of-component byte; a non-zero end-of-component
     * byte is only legal on the last component.
     *
     * @throws MarshalException when the bytes are not a well-formed composite
     */
    public void validate(ByteBuffer bytes) throws MarshalException
    {
        ByteBuffer bb = bytes.duplicate();
        readIsStatic(bb);

        int i = 0;
        ByteBuffer previous = null;
        while (bb.remaining() > 0)
        {
            AbstractType<?> comparator = validateComparator(i, bb);

            if (bb.remaining() < 2)
                throw new MarshalException("Not enough bytes to read value size of component " + i);
            int length = ByteBufferUtil.readShortLength(bb);

            if (bb.remaining() < length)
                throw new MarshalException("Not enough bytes to read value of component " + i);
            ByteBuffer value = ByteBufferUtil.readBytes(bb, length);

            comparator.validateCollectionMember(value, previous);

            if (bb.remaining() == 0)
                throw new MarshalException("Not enough bytes to read the end-of-component byte of component" + i);
            byte b = bb.get();
            if (b != 0 && bb.remaining() != 0)
                throw new MarshalException("Invalid bytes remaining after an end-of-component at component" + i);

            previous = value;
            ++i;
        }
    }

    // Serializes the given values as one composite name (subclass-specific encoding).
    public abstract ByteBuffer decompose(Object... objects);

    public TypeSerializer<ByteBuffer> getSerializer()
    {
        // Composites are handled as opaque bytes at the serializer level.
        return BytesSerializer.instance;
    }

    /**
     * @return the comparator for the i-th component. A static CompositeType
     * consults its declared component list; DynamicCompositeType reads the
     * type information from the front of {@code bb} instead.
     * @param i the component index
     * @param bb the name buffer, positioned at the component's type definition
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb);

    /**
     * Two-buffer variant used by compare(): resolves the comparator for the
     * i-th component, consuming the per-component type headers of both
     * {@code bb1} and {@code bb2} where present.
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb1, ByteBuffer bb2);

    /**
     * Like getComparator(i, bb) but also appends a textual rendering of the
     * component's type information to {@code sb} (used by getString).
     */
    abstract protected AbstractType<?> getAndAppendComparator(int i, ByteBuffer bb, StringBuilder sb);

    /**
     * Like getComparator, but validates that @param i does not exceed the defined range
     */
    abstract protected AbstractType<?> validateComparator(int i, ByteBuffer bb) throws MarshalException;

    /**
     * Used by fromString: parses the optional comparator prefix of one textual
     * component.
     */
    abstract protected ParsedComparator parseComparator(int i, String part);

    /**
     * Result of parseComparator: the component's type, the text left to parse,
     * and how to re-serialize the component's type header.
     */
    protected static interface ParsedComparator
    {
        AbstractType<?> getAbstractType();

        String getRemainingPart();

        int getComparatorSerializedSize();

        void serializeComparator(ByteBuffer bb);
    }
}
/* * Copyright 2014-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.docksidestage.dockside.dbflute.cbean.cq.bs; import java.util.*; import org.dbflute.cbean.*; import org.dbflute.cbean.chelper.*; import org.dbflute.cbean.ckey.*; import org.dbflute.cbean.coption.*; import org.dbflute.cbean.cvalue.ConditionValue; import org.dbflute.cbean.ordering.*; import org.dbflute.cbean.scoping.*; import org.dbflute.cbean.sqlclause.SqlClause; import org.dbflute.dbmeta.DBMetaProvider; import org.docksidestage.dockside.dbflute.allcommon.*; import org.docksidestage.dockside.dbflute.cbean.*; import org.docksidestage.dockside.dbflute.cbean.cq.*; /** * The abstract condition-query of VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF. 
* @author DBFlute(AutoGenerator) */ public abstract class AbstractBsVendorTheLongAndWindingTableAndColumnRefCQ extends AbstractConditionQuery { // =================================================================================== // Constructor // =========== public AbstractBsVendorTheLongAndWindingTableAndColumnRefCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) { super(referrerQuery, sqlClause, aliasName, nestLevel); } // =================================================================================== // DB Meta // ======= @Override protected DBMetaProvider xgetDBMetaProvider() { return DBMetaInstanceHandler.getProvider(); } public String asTableDbName() { return "VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF"; } // =================================================================================== // Query // ===== /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as equal. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_Equal(Long theLongAndWindingTableAndColumnRefId) { doSetTheLongAndWindingTableAndColumnRefId_Equal(theLongAndWindingTableAndColumnRefId); } protected void doSetTheLongAndWindingTableAndColumnRefId_Equal(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_EQ, theLongAndWindingTableAndColumnRefId); } /** * NotEqual(&lt;&gt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as notEqual. 
(basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_NotEqual(Long theLongAndWindingTableAndColumnRefId) { doSetTheLongAndWindingTableAndColumnRefId_NotEqual(theLongAndWindingTableAndColumnRefId); } protected void doSetTheLongAndWindingTableAndColumnRefId_NotEqual(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_NES, theLongAndWindingTableAndColumnRefId); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_GreaterThan(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_GT, theLongAndWindingTableAndColumnRefId); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as lessThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_LessThan(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_LT, theLongAndWindingTableAndColumnRefId); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as greaterEqual. 
(basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_GreaterEqual(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_GE, theLongAndWindingTableAndColumnRefId); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefId The value of theLongAndWindingTableAndColumnRefId as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_LessEqual(Long theLongAndWindingTableAndColumnRefId) { regTheLongAndWindingTableAndColumnRefId(CK_LE, theLongAndWindingTableAndColumnRefId); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param minNumber The min number of theLongAndWindingTableAndColumnRefId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of theLongAndWindingTableAndColumnRefId. (basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of range-of. (NotNull) */ public void setTheLongAndWindingTableAndColumnRefId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) { setTheLongAndWindingTableAndColumnRefId_RangeOf(minNumber, maxNumber, xcROOP(opLambda)); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param minNumber The min number of theLongAndWindingTableAndColumnRefId. 
(basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of theLongAndWindingTableAndColumnRefId. (basically NotNull: if op.allowOneSide(), null allowed) * @param rangeOfOption The option of range-of. (NotNull) */ protected void setTheLongAndWindingTableAndColumnRefId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) { regROO(minNumber, maxNumber, xgetCValueTheLongAndWindingTableAndColumnRefId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID", rangeOfOption); } /** * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefIdList The collection of theLongAndWindingTableAndColumnRefId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_InScope(Collection<Long> theLongAndWindingTableAndColumnRefIdList) { doSetTheLongAndWindingTableAndColumnRefId_InScope(theLongAndWindingTableAndColumnRefIdList); } protected void doSetTheLongAndWindingTableAndColumnRefId_InScope(Collection<Long> theLongAndWindingTableAndColumnRefIdList) { regINS(CK_INS, cTL(theLongAndWindingTableAndColumnRefIdList), xgetCValueTheLongAndWindingTableAndColumnRefId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"); } /** * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} * @param theLongAndWindingTableAndColumnRefIdList The collection of theLongAndWindingTableAndColumnRefId as notInScope. 
(basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefId_NotInScope(Collection<Long> theLongAndWindingTableAndColumnRefIdList) { doSetTheLongAndWindingTableAndColumnRefId_NotInScope(theLongAndWindingTableAndColumnRefIdList); } protected void doSetTheLongAndWindingTableAndColumnRefId_NotInScope(Collection<Long> theLongAndWindingTableAndColumnRefIdList) { regINS(CK_NINS, cTL(theLongAndWindingTableAndColumnRefIdList), xgetCValueTheLongAndWindingTableAndColumnRefId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"); } /** * IsNull {is null}. And OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} */ public void setTheLongAndWindingTableAndColumnRefId_IsNull() { regTheLongAndWindingTableAndColumnRefId(CK_ISN, DOBJ); } /** * IsNotNull {is not null}. And OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} */ public void setTheLongAndWindingTableAndColumnRefId_IsNotNull() { regTheLongAndWindingTableAndColumnRefId(CK_ISNN, DOBJ); } protected void regTheLongAndWindingTableAndColumnRefId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueTheLongAndWindingTableAndColumnRefId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"); } protected abstract ConditionValue xgetCValueTheLongAndWindingTableAndColumnRefId(); /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as equal. 
(basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_Equal(Long theLongAndWindingTableAndColumnId) { doSetTheLongAndWindingTableAndColumnId_Equal(theLongAndWindingTableAndColumnId); } protected void doSetTheLongAndWindingTableAndColumnId_Equal(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_EQ, theLongAndWindingTableAndColumnId); } /** * NotEqual(&lt;&gt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as notEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_NotEqual(Long theLongAndWindingTableAndColumnId) { doSetTheLongAndWindingTableAndColumnId_NotEqual(theLongAndWindingTableAndColumnId); } protected void doSetTheLongAndWindingTableAndColumnId_NotEqual(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_NES, theLongAndWindingTableAndColumnId); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_GreaterThan(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_GT, theLongAndWindingTableAndColumnId); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. 
<br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as lessThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_LessThan(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_LT, theLongAndWindingTableAndColumnId); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as greaterEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_GreaterEqual(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_GE, theLongAndWindingTableAndColumnId); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnId The value of theLongAndWindingTableAndColumnId as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_LessEqual(Long theLongAndWindingTableAndColumnId) { regTheLongAndWindingTableAndColumnId(CK_LE, theLongAndWindingTableAndColumnId); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param minNumber The min number of theLongAndWindingTableAndColumnId. 
(basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of theLongAndWindingTableAndColumnId. (basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of range-of. (NotNull) */ public void setTheLongAndWindingTableAndColumnId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) { setTheLongAndWindingTableAndColumnId_RangeOf(minNumber, maxNumber, xcROOP(opLambda)); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param minNumber The min number of theLongAndWindingTableAndColumnId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of theLongAndWindingTableAndColumnId. (basically NotNull: if op.allowOneSide(), null allowed) * @param rangeOfOption The option of range-of. (NotNull) */ protected void setTheLongAndWindingTableAndColumnId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) { regROO(minNumber, maxNumber, xgetCValueTheLongAndWindingTableAndColumnId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID", rangeOfOption); } /** * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnIdList The collection of theLongAndWindingTableAndColumnId as inScope. 
(basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_InScope(Collection<Long> theLongAndWindingTableAndColumnIdList) { doSetTheLongAndWindingTableAndColumnId_InScope(theLongAndWindingTableAndColumnIdList); } protected void doSetTheLongAndWindingTableAndColumnId_InScope(Collection<Long> theLongAndWindingTableAndColumnIdList) { regINS(CK_INS, cTL(theLongAndWindingTableAndColumnIdList), xgetCValueTheLongAndWindingTableAndColumnId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID"); } /** * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} * @param theLongAndWindingTableAndColumnIdList The collection of theLongAndWindingTableAndColumnId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnId_NotInScope(Collection<Long> theLongAndWindingTableAndColumnIdList) { doSetTheLongAndWindingTableAndColumnId_NotInScope(theLongAndWindingTableAndColumnIdList); } protected void doSetTheLongAndWindingTableAndColumnId_NotInScope(Collection<Long> theLongAndWindingTableAndColumnIdList) { regINS(CK_NINS, cTL(theLongAndWindingTableAndColumnIdList), xgetCValueTheLongAndWindingTableAndColumnId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID"); } protected void regTheLongAndWindingTableAndColumnId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueTheLongAndWindingTableAndColumnId(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID"); } protected abstract ConditionValue xgetCValueTheLongAndWindingTableAndColumnId(); /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * @param theLongAndWindingTableAndColumnRefDate The value of theLongAndWindingTableAndColumnRefDate as equal. 
(basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefDate_Equal(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) { regTheLongAndWindingTableAndColumnRefDate(CK_EQ, theLongAndWindingTableAndColumnRefDate); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * @param theLongAndWindingTableAndColumnRefDate The value of theLongAndWindingTableAndColumnRefDate as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefDate_GreaterThan(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) { regTheLongAndWindingTableAndColumnRefDate(CK_GT, theLongAndWindingTableAndColumnRefDate); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * @param theLongAndWindingTableAndColumnRefDate The value of theLongAndWindingTableAndColumnRefDate as lessThan. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefDate_LessThan(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) { regTheLongAndWindingTableAndColumnRefDate(CK_LT, theLongAndWindingTableAndColumnRefDate); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * @param theLongAndWindingTableAndColumnRefDate The value of theLongAndWindingTableAndColumnRefDate as greaterEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefDate_GreaterEqual(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) { regTheLongAndWindingTableAndColumnRefDate(CK_GE, theLongAndWindingTableAndColumnRefDate); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. 
<br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * @param theLongAndWindingTableAndColumnRefDate The value of theLongAndWindingTableAndColumnRefDate as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setTheLongAndWindingTableAndColumnRefDate_LessEqual(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) { regTheLongAndWindingTableAndColumnRefDate(CK_LE, theLongAndWindingTableAndColumnRefDate); } /** * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * <pre>e.g. setTheLongAndWindingTableAndColumnRefDate_FromTo(fromDate, toDate, op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> op.<span style="color: #CC4747">compareAsDate()</span>);</pre> * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of theLongAndWindingTableAndColumnRefDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of theLongAndWindingTableAndColumnRefDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of from-to. (NotNull) */ public void setTheLongAndWindingTableAndColumnRefDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, ConditionOptionCall<FromToOption> opLambda) { setTheLongAndWindingTableAndColumnRefDate_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda)); } /** * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br> * And NullIgnored, OnlyOnceRegistered. <br> * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} * <pre>e.g. 
setTheLongAndWindingTableAndColumnRefDate_FromTo(fromDate, toDate, new <span style="color: #CC4747">FromToOption</span>().compareAsDate());</pre> * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of theLongAndWindingTableAndColumnRefDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of theLongAndWindingTableAndColumnRefDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param fromToOption The option of from-to. (NotNull) */ protected void setTheLongAndWindingTableAndColumnRefDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, FromToOption fromToOption) { String nm = "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE"; FromToOption op = fromToOption; regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueTheLongAndWindingTableAndColumnRefDate(), nm, op); } protected void regTheLongAndWindingTableAndColumnRefDate(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueTheLongAndWindingTableAndColumnRefDate(), "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE"); } protected abstract ConditionValue xgetCValueTheLongAndWindingTableAndColumnRefDate(); /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * @param shortDate The value of shortDate as equal. (basically NotNull: error as default, or no condition as option) */ public void setShortDate_Equal(java.time.LocalDate shortDate) { regShortDate(CK_EQ, shortDate); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * @param shortDate The value of shortDate as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setShortDate_GreaterThan(java.time.LocalDate shortDate) { regShortDate(CK_GT, shortDate); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * @param shortDate The value of shortDate as lessThan. 
(basically NotNull: error as default, or no condition as option) */ public void setShortDate_LessThan(java.time.LocalDate shortDate) { regShortDate(CK_LT, shortDate); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * @param shortDate The value of shortDate as greaterEqual. (basically NotNull: error as default, or no condition as option) */ public void setShortDate_GreaterEqual(java.time.LocalDate shortDate) { regShortDate(CK_GE, shortDate); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * @param shortDate The value of shortDate as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setShortDate_LessEqual(java.time.LocalDate shortDate) { regShortDate(CK_LE, shortDate); } /** * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br> * And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * <pre>e.g. setShortDate_FromTo(fromDate, toDate, op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> op.<span style="color: #CC4747">compareAsDate()</span>);</pre> * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of shortDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of shortDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of from-to. (NotNull) */ public void setShortDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, ConditionOptionCall<FromToOption> opLambda) { setShortDate_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda)); } /** * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br> * And NullIgnored, OnlyOnceRegistered. <br> * SHORT_DATE: {NotNull, DATE(10)} * <pre>e.g. 
setShortDate_FromTo(fromDate, toDate, new <span style="color: #CC4747">FromToOption</span>().compareAsDate());</pre> * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of shortDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of shortDate. (basically NotNull: if op.allowOneSide(), null allowed) * @param fromToOption The option of from-to. (NotNull) */ protected void setShortDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, FromToOption fromToOption) { String nm = "SHORT_DATE"; FromToOption op = fromToOption; regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueShortDate(), nm, op); } protected void regShortDate(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueShortDate(), "SHORT_DATE"); } protected abstract ConditionValue xgetCValueShortDate(); // =================================================================================== // ScalarCondition // =============== /** * Prepare ScalarCondition as equal. <br> * {where FOO = (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_Equal() { return xcreateSLCFunction(CK_EQ, VendorTheLongAndWindingTableAndColumnRefCB.class); } /** * Prepare ScalarCondition as equal. 
<br> * {where FOO &lt;&gt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_NotEqual() { return xcreateSLCFunction(CK_NES, VendorTheLongAndWindingTableAndColumnRefCB.class); } /** * Prepare ScalarCondition as greaterThan. <br> * {where FOO &gt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_GreaterThan() { return xcreateSLCFunction(CK_GT, VendorTheLongAndWindingTableAndColumnRefCB.class); } /** * Prepare ScalarCondition as lessThan. 
<br> * {where FOO &lt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_LessThan() { return xcreateSLCFunction(CK_LT, VendorTheLongAndWindingTableAndColumnRefCB.class); } /** * Prepare ScalarCondition as greaterEqual. <br> * {where FOO &gt;= (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_GreaterEqual() { return xcreateSLCFunction(CK_GE, VendorTheLongAndWindingTableAndColumnRefCB.class); } /** * Prepare ScalarCondition as lessEqual. <br> * {where FOO &lt;= (select max(BAR) from ...)} * <pre> * cb.query().<span style="color: #CC4747">scalar_LessEqual()</span>.max(new SubQuery&lt;VendorTheLongAndWindingTableAndColumnRefCB&gt;() { * public void query(VendorTheLongAndWindingTableAndColumnRefCB subCB) { * subCB.specify().setFoo... 
<span style="color: #3F7E5E">// derived column for function</span> * subCB.query().setBar... * } * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<VendorTheLongAndWindingTableAndColumnRefCB> scalar_LessEqual() { return xcreateSLCFunction(CK_LE, VendorTheLongAndWindingTableAndColumnRefCB.class); } @SuppressWarnings("unchecked") protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) { assertObjectNotNull("subQuery", sq); VendorTheLongAndWindingTableAndColumnRefCB cb = xcreateScalarConditionCB(); sq.query((CB)cb); String pp = keepScalarCondition(cb.query()); // for saving query-value cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by registerScalarCondition(fn, cb.query(), pp, rd, cs, op); } public abstract String keepScalarCondition(VendorTheLongAndWindingTableAndColumnRefCQ sq); protected VendorTheLongAndWindingTableAndColumnRefCB xcreateScalarConditionCB() { VendorTheLongAndWindingTableAndColumnRefCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb; } protected VendorTheLongAndWindingTableAndColumnRefCB xcreateScalarConditionPartitionByCB() { VendorTheLongAndWindingTableAndColumnRefCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb; } // =================================================================================== // MyselfDerived // ============= public void xsmyselfDerive(String fn, SubQuery<VendorTheLongAndWindingTableAndColumnRefCB> sq, String al, DerivedReferrerOption op) { assertObjectNotNull("subQuery", sq); VendorTheLongAndWindingTableAndColumnRefCB cb = new VendorTheLongAndWindingTableAndColumnRefCB(); cb.xsetupForDerivedReferrer(this); lockCall(() -> sq.query(cb)); String pp = keepSpecifyMyselfDerived(cb.query()); String pk = "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"; registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, 
"myselfDerived", al, op); } public abstract String keepSpecifyMyselfDerived(VendorTheLongAndWindingTableAndColumnRefCQ sq); /** * Prepare for (Query)MyselfDerived (correlated sub-query). * @return The object to set up a function for myself table. (NotNull) */ public HpQDRFunction<VendorTheLongAndWindingTableAndColumnRefCB> myselfDerived() { return xcreateQDRFunctionMyselfDerived(VendorTheLongAndWindingTableAndColumnRefCB.class); } @SuppressWarnings("unchecked") protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) { assertObjectNotNull("subQuery", sq); VendorTheLongAndWindingTableAndColumnRefCB cb = new VendorTheLongAndWindingTableAndColumnRefCB(); cb.xsetupForDerivedReferrer(this); sq.query((CB)cb); String pk = "THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"; String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value. String prpp = keepQueryMyselfDerivedParameter(vl); registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op); } public abstract String keepQueryMyselfDerived(VendorTheLongAndWindingTableAndColumnRefCQ sq); public abstract String keepQueryMyselfDerivedParameter(Object vl); // =================================================================================== // MyselfExists // ============ /** * Prepare for MyselfExists (correlated sub-query). * @param subCBLambda The implementation of sub-query. 
(NotNull) */ public void myselfExists(SubQuery<VendorTheLongAndWindingTableAndColumnRefCB> subCBLambda) { assertObjectNotNull("subCBLambda", subCBLambda); VendorTheLongAndWindingTableAndColumnRefCB cb = new VendorTheLongAndWindingTableAndColumnRefCB(); cb.xsetupForMyselfExists(this); lockCall(() -> subCBLambda.query(cb)); String pp = keepMyselfExists(cb.query()); registerMyselfExists(cb.query(), pp); } public abstract String keepMyselfExists(VendorTheLongAndWindingTableAndColumnRefCQ sq); // =================================================================================== // Manual Order // ============ /** * Order along manual ordering information. * <pre> * cb.query().addOrderBy_Birthdate_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_GreaterEqual</span>(priorityDate); <span style="color: #3F7E5E">// e.g. 
2000/01/01</span> * }); * <span style="color: #3F7E5E">// order by </span> * <span style="color: #3F7E5E">// case</span> * <span style="color: #3F7E5E">// when BIRTHDATE &gt;= '2000/01/01' then 0</span> * <span style="color: #3F7E5E">// else 1</span> * <span style="color: #3F7E5E">// end asc, ...</span> * * cb.query().addOrderBy_MemberStatusCode_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Withdrawal); * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Formalized); * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Provisional); * }); * <span style="color: #3F7E5E">// order by </span> * <span style="color: #3F7E5E">// case</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'WDL' then 0</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'FML' then 1</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'PRV' then 2</span> * <span style="color: #3F7E5E">// else 3</span> * <span style="color: #3F7E5E">// end asc, ...</span> * </pre> * <p>This function with Union is unsupported!</p> * <p>The order values are bound (treated as bind parameter).</p> * @param opLambda The callback for option of manual-order containing order values. (NotNull) */ public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public! 
xdoWithManualOrder(cMOO(opLambda)); } // =================================================================================== // Small Adjustment // ================ // =================================================================================== // Very Internal // ============= protected VendorTheLongAndWindingTableAndColumnRefCB newMyCB() { return new VendorTheLongAndWindingTableAndColumnRefCB(); } // very internal (for suppressing warn about 'Not Use Import') protected String xabUDT() { return Date.class.getName(); } protected String xabCQ() { return VendorTheLongAndWindingTableAndColumnRefCQ.class.getName(); } protected String xabLSO() { return LikeSearchOption.class.getName(); } protected String xabSLCS() { return HpSLCSetupper.class.getName(); } protected String xabSCP() { return SubQuery.class.getName(); } }
/* * Copyright 2013 Muthukumaran (https://github.com/muthuishere/). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sshutils.views.settings; import com.sshutils.beans.ConnectionInfo; import com.sshutils.beans.Settings; import com.sshutils.controller.SettingsController; import com.sshutils.utils.StringHelper; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.net.URL; import java.util.logging.Level; import javax.swing.AbstractAction; import javax.swing.ActionMap; import javax.swing.DefaultListModel; import javax.swing.ImageIcon; import javax.swing.InputMap; import javax.swing.JComponent; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.KeyStroke; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import org.apache.log4j.Logger; public class CredentialChange extends javax.swing.JDialog { private static final Logger log = Logger.getLogger(CredentialChange.class); /** * A return status code - returned if Cancel button has been pressed */ public static final int RET_CANCEL = 0; private DefaultListModel listServers = new DefaultListModel(); public DefaultListModel getListServers() { return listServers; } public void setListServers(DefaultListModel listServers) { this.listServers = listServers; } /** * A return status code - returned if OK button has been pressed */ public static final int RET_OK = 1; /** * Creates new form SettingsMain */ public 
CredentialChange(java.awt.Frame parent, boolean modal) { super(parent, modal); initComponents(); URL iconURL = getClass().getResource("/com/sshutils/resources/red/16x16/app.png"); // iconURL is null when not found ImageIcon icon = new ImageIcon(iconURL); this.setIconImage(icon.getImage()); this.setTitle("Credential-Change"); // Close the dialog when Esc is pressed String cancelName = "cancel"; InputMap inputMap = getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), cancelName); ActionMap actionMap = getRootPane().getActionMap(); actionMap.put(cancelName, new AbstractAction() { public void actionPerformed(ActionEvent e) { doClose(RET_CANCEL); } }); } private void showErrormsg(final String msg){ new Thread() { public void run() { try { lblError.setVisible(true); lblError.setText(msg); Thread.sleep(10000); hideLabels(); } catch (InterruptedException ex) { } } }.start(); } private void showSuccessmsg(final String msg){ new Thread() { public void run() { try { lblSuccess.setVisible(true); lblSuccess.setText(msg); Thread.sleep(10000); hideLabels(); } catch (InterruptedException ex) { } } }.start(); } private void hideLabels(){ lblSuccess.setVisible(false); lblError.setVisible(false); } public boolean confirmMsg(String msg, String title) { int dialogresult = JOptionPane.showConfirmDialog(this, msg, title, JOptionPane.YES_NO_OPTION); if (dialogresult == JOptionPane.YES_OPTION) { return true; } return false; } ListSelectionListener listSelectionListener = new ListSelectionListener() { public void valueChanged(ListSelectionEvent listSelectionEvent) { boolean adjust = listSelectionEvent.getValueIsAdjusting(); if (!adjust) { JList list = (JList) listSelectionEvent.getSource(); log.info("Selected value" + list.getSelectedValue()); boolean flgEnabled = (null == list.getSelectedValue()) ? 
false : true; //btnEdit.setEnabled(flgEnabled); //btnDelete.setEnabled(flgEnabled); } } }; /** * @return the return status of this dialog - one of RET_OK or RET_CANCEL */ public int getReturnStatus() { return returnStatus; } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { okButton = new javax.swing.JButton(); jLabel1 = new javax.swing.JLabel(); cmbUsers = new javax.swing.JComboBox(); jLabel2 = new javax.swing.JLabel(); txtPassword = new javax.swing.JPasswordField(); btnUpdatePwd = new javax.swing.JButton(); btnClose = new javax.swing.JButton(); lblError = new javax.swing.JLabel(); lblSuccess = new javax.swing.JLabel(); addWindowListener(new java.awt.event.WindowAdapter() { public void windowClosing(java.awt.event.WindowEvent evt) { closeDialog(evt); } }); okButton.setMnemonic('o'); okButton.setText("OK"); okButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { okButtonActionPerformed(evt); } }); jLabel1.setText("Users"); cmbUsers.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Item 1", "Item 2", "Item 3", "Item 4" })); jLabel2.setText("Password"); txtPassword.setText("jPasswordField1"); btnUpdatePwd.setMnemonic('u'); btnUpdatePwd.setText("Update"); btnUpdatePwd.setToolTipText(""); btnClose.setText("Cancel"); btnClose.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnCloseActionPerformed(evt); } }); lblError.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N lblError.setForeground(new java.awt.Color(255, 0, 0)); lblError.setText("jLabel10"); lblSuccess.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N 
lblSuccess.setForeground(new java.awt.Color(0, 51, 0)); lblSuccess.setText("jLabel10"); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(19, 19, 19) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel1) .addComponent(jLabel2)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(cmbUsers, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(txtPassword, javax.swing.GroupLayout.DEFAULT_SIZE, 210, Short.MAX_VALUE))) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(btnUpdatePwd))) .addGap(168, 168, 168)) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(lblSuccess, javax.swing.GroupLayout.PREFERRED_SIZE, 277, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 0, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() .addComponent(lblError, javax.swing.GroupLayout.PREFERRED_SIZE, 277, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(okButton, javax.swing.GroupLayout.PREFERRED_SIZE, 67, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(18, 18, 18) .addComponent(btnClose))) 
.addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(cmbUsers, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(18, 18, 18) .addComponent(jLabel2)) .addGroup(layout.createSequentialGroup() .addGap(26, 26, 26) .addComponent(txtPassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGap(18, 18, 18) .addComponent(btnUpdatePwd) .addGap(18, 18, 18) .addComponent(lblSuccess) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(lblError, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE) .addComponent(btnClose) .addComponent(okButton))) ); getRootPane().setDefaultButton(okButton); pack(); }// </editor-fold>//GEN-END:initComponents private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed doClose(RET_OK); }//GEN-LAST:event_okButtonActionPerformed /** * Closes the dialog */ private void closeDialog(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_closeDialog doClose(RET_CANCEL); }//GEN-LAST:event_closeDialog private void btnCloseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCloseActionPerformed // TODO add your handling code here: doClose(RET_CANCEL); }//GEN-LAST:event_btnCloseActionPerformed private void doClose(int retStatus) { returnStatus = retStatus; setVisible(false); dispose(); } /** * @param args the command line 
arguments */ public static void main(String args[]) { /* Set the Nimbus look and feel */ //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) "> /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html */ try { for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) { if ("Nimbus".equals(info.getName())) { javax.swing.UIManager.setLookAndFeel(info.getClassName()); break; } } } catch (ClassNotFoundException ex) { java.util.logging.Logger.getLogger(CredentialChange.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (InstantiationException ex) { java.util.logging.Logger.getLogger(CredentialChange.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (IllegalAccessException ex) { java.util.logging.Logger.getLogger(CredentialChange.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (javax.swing.UnsupportedLookAndFeelException ex) { java.util.logging.Logger.getLogger(CredentialChange.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } //</editor-fold> /* Create and display the dialog */ java.awt.EventQueue.invokeLater(new Runnable() { public void run() { CredentialChange dialog = new CredentialChange(new javax.swing.JFrame(), true); dialog.addWindowListener(new java.awt.event.WindowAdapter() { @Override public void windowClosing(java.awt.event.WindowEvent e) { System.exit(0); } }); dialog.setVisible(true); } }); } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton btnClose; private javax.swing.JButton btnUpdatePwd; private javax.swing.JComboBox cmbUsers; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel lblError; private javax.swing.JLabel lblSuccess; private javax.swing.JButton okButton; private 
javax.swing.JPasswordField txtPassword; // End of variables declaration//GEN-END:variables private int returnStatus = RET_CANCEL; }
/* * Generated by the Jasper component of Apache Tomcat * Version: Apache Tomcat/8.0.41 * Generated at: 2017-05-03 01:14:28 UTC * Note: The last modified time of this file was set to * the last modified time of the source file after * generation to assist with modification tracking. */ package org.apache.jsp.WEB_002dINF.views; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; public final class post_jsp extends org.apache.jasper.runtime.HttpJspBase implements org.apache.jasper.runtime.JspSourceDependent, org.apache.jasper.runtime.JspSourceImports { private static final javax.servlet.jsp.JspFactory _jspxFactory = javax.servlet.jsp.JspFactory.getDefaultFactory(); private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants; static { _jspx_dependants = new java.util.HashMap<java.lang.String,java.lang.Long>(7); _jspx_dependants.put("jar:file:/C:/Users/Kristiano/workspace-curso-jpa/.metadata/.plugins/org.eclipse.wst.server.core/tmp0/wtpwebapps/blog/WEB-INF/lib/jstl-1.2.jar!/META-INF/fn.tld", Long.valueOf(1153395882000L)); _jspx_dependants.put("/WEB-INF/lib/spring-webmvc-4.3.6.RELEASE.jar", Long.valueOf(1486425159377L)); _jspx_dependants.put("jar:file:/C:/Users/Kristiano/workspace-curso-jpa/.metadata/.plugins/org.eclipse.wst.server.core/tmp0/wtpwebapps/blog/WEB-INF/lib/spring-webmvc-4.3.6.RELEASE.jar!/META-INF/spring-form.tld", Long.valueOf(1485357388000L)); _jspx_dependants.put("/WEB-INF/lib/jstl-1.2.jar", Long.valueOf(1470097380024L)); _jspx_dependants.put("/WEB-INF/jspl/taglibs.jspf", Long.valueOf(1493774007834L)); _jspx_dependants.put("jar:file:/C:/Users/Kristiano/workspace-curso-jpa/.metadata/.plugins/org.eclipse.wst.server.core/tmp0/wtpwebapps/blog/WEB-INF/lib/jstl-1.2.jar!/META-INF/c.tld", Long.valueOf(1153395882000L)); _jspx_dependants.put("jar:file:/C:/Users/Kristiano/workspace-curso-jpa/.metadata/.plugins/org.eclipse.wst.server.core/tmp0/wtpwebapps/blog/WEB-INF/lib/jstl-1.2.jar!/META-INF/fmt.tld", 
Long.valueOf(1153395882000L)); } private static final java.util.Set<java.lang.String> _jspx_imports_packages; private static final java.util.Set<java.lang.String> _jspx_imports_classes; static { _jspx_imports_packages = new java.util.HashSet<>(); _jspx_imports_packages.add("javax.servlet"); _jspx_imports_packages.add("javax.servlet.http"); _jspx_imports_packages.add("javax.servlet.jsp"); _jspx_imports_classes = null; } private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody; private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody; private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fparseDate_0026_005fvar_005fvalue_005fpattern_005fnobody; private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fformatDate_0026_005fvalue_005ftype_005fnobody; private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fc_005fforEach_0026_005fvar_005fitems; private volatile javax.el.ExpressionFactory _el_expressionfactory; private volatile org.apache.tomcat.InstanceManager _jsp_instancemanager; public java.util.Map<java.lang.String,java.lang.Long> getDependants() { return _jspx_dependants; } public java.util.Set<java.lang.String> getPackageImports() { return _jspx_imports_packages; } public java.util.Set<java.lang.String> getClassImports() { return _jspx_imports_classes; } public javax.el.ExpressionFactory _jsp_getExpressionFactory() { if (_el_expressionfactory == null) { synchronized (this) { if (_el_expressionfactory == null) { _el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory(); } } } return _el_expressionfactory; } public org.apache.tomcat.InstanceManager _jsp_getInstanceManager() { if (_jsp_instancemanager == null) { synchronized (this) { if (_jsp_instancemanager == null) { _jsp_instancemanager = 
org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig()); } } } return _jsp_instancemanager; } public void _jspInit() { _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); _005fjspx_005ftagPool_005ffmt_005fparseDate_0026_005fvar_005fvalue_005fpattern_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); _005fjspx_005ftagPool_005ffmt_005fformatDate_0026_005fvalue_005ftype_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); _005fjspx_005ftagPool_005fc_005fforEach_0026_005fvar_005fitems = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig()); } public void _jspDestroy() { _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.release(); _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody.release(); _005fjspx_005ftagPool_005ffmt_005fparseDate_0026_005fvar_005fvalue_005fpattern_005fnobody.release(); _005fjspx_005ftagPool_005ffmt_005fformatDate_0026_005fvalue_005ftype_005fnobody.release(); _005fjspx_005ftagPool_005fc_005fforEach_0026_005fvar_005fitems.release(); } public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response) throws java.io.IOException, javax.servlet.ServletException { final java.lang.String _jspx_method = request.getMethod(); if (!"GET".equals(_jspx_method) && !"POST".equals(_jspx_method) && !"HEAD".equals(_jspx_method) && !javax.servlet.DispatcherType.ERROR.equals(request.getDispatcherType())) { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "JSPs only permit GET POST or HEAD"); return; } final javax.servlet.jsp.PageContext pageContext; javax.servlet.http.HttpSession session = null; 
final javax.servlet.ServletContext application; final javax.servlet.ServletConfig config; javax.servlet.jsp.JspWriter out = null; final java.lang.Object page = this; javax.servlet.jsp.JspWriter _jspx_out = null; javax.servlet.jsp.PageContext _jspx_page_context = null; try { response.setContentType("text/html; charset=UTF-8"); pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true); _jspx_page_context = pageContext; application = pageContext.getServletContext(); config = pageContext.getServletConfig(); session = pageContext.getSession(); out = pageContext.getOut(); _jspx_out = out; out.write("\r\n"); out.write("\r\n"); out.write("\r\n"); out.write("\r\n"); out.write("<!DOCTYPE html\">\r\n"); out.write("<html>\r\n"); out.write("<head>\r\n"); out.write("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">\r\n"); out.write("<title>"); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.titulo}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write("</title>\r\n"); out.write("<link type=\"text/css\" rel=\"stylesheet\" href='"); if (_jspx_meth_c_005furl_005f0(_jspx_page_context)) return; out.write("'>\r\n"); out.write("</head>\r\n"); out.write("<body>\r\n"); out.write(" <fieldset class=\"header\">\r\n"); out.write(" \t<h1>Blog do Curso de Spring Data JPA | Devmedia</h1>\r\n"); out.write(" </fieldset>\r\n"); out.write(" \r\n"); out.write("\t"); if (_jspx_meth_c_005fimport_005f0(_jspx_page_context)) return; out.write("\r\n"); out.write("\t<br>\r\n"); out.write("\t\r\n"); out.write("\t<fieldset>\r\n"); out.write("\t\t\r\n"); out.write("\t\t<div>\r\n"); out.write("\t\t\t<div>\r\n"); out.write("\t\t\t\t<h2>"); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.titulo}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write("</h2>\r\n"); 
out.write("\t\t\t\t<p>Autor: <a href=\""); if (_jspx_meth_c_005furl_005f1(_jspx_page_context)) return; out.write('"'); out.write('>'); out.write(' '); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.autor.nome}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write(" </a> \r\n"); out.write("\t\t\t\t\t"); if (_jspx_meth_fmt_005fparseDate_005f0(_jspx_page_context)) return; out.write("\r\n"); out.write("\t\t\t\t| Data: "); if (_jspx_meth_fmt_005fformatDate_005f0(_jspx_page_context)) return; out.write("\r\n"); out.write("\t\t\t\t </p>\r\n"); out.write("\t\t\t</div>\r\n"); out.write("\t\t\t<div>\r\n"); out.write("\t\t\t\t<p class=\"post-texto\">"); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.texto}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write("</p>\r\n"); out.write("\t\t\t</div>\r\n"); out.write("\t\t\t<div>\r\n"); out.write("\t\t\t\t<p class=\"post-categ\">\r\n"); out.write("\t\t\t\t\t<span>Categorias:</span>\r\n"); out.write("\t\t\t\t\t"); if (_jspx_meth_c_005fforEach_005f0(_jspx_page_context)) return; out.write("\r\n"); out.write("\t\t\t\t</p>\r\n"); out.write("\t\t\t</div>\r\n"); out.write("\t\t\t<div class=\"post-autor\">\r\n"); out.write("\t\t\t\t<img class=\"post-avatar\" src=\""); if (_jspx_meth_c_005furl_005f3(_jspx_page_context)) return; out.write(" \"/>\r\n"); out.write("\t\t\t\t<p><strong>"); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.autor.nome}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write("</strong></p>\r\n"); out.write("\t\t\t\t<p>"); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.autor.biografia}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); 
out.write("</p>\r\n"); out.write("\t\t\t</div>\r\n"); out.write("\t\t</div>\r\n"); out.write("\t\t"); if (_jspx_meth_c_005fimport_005f1(_jspx_page_context)) return; out.write("\r\n"); out.write("\t</fieldset>\r\n"); out.write("\t\r\n"); out.write("</body>\r\n"); out.write("</html>"); } catch (java.lang.Throwable t) { if (!(t instanceof javax.servlet.jsp.SkipPageException)){ out = _jspx_out; if (out != null && out.getBufferSize() != 0) try { if (response.isCommitted()) { out.flush(); } else { out.clearBuffer(); } } catch (java.io.IOException e) {} if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); else throw new ServletException(t); } } finally { _jspxFactory.releasePageContext(_jspx_page_context); } } private boolean _jspx_meth_c_005furl_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:url org.apache.taglibs.standard.tag.rt.core.UrlTag _jspx_th_c_005furl_005f0 = (org.apache.taglibs.standard.tag.rt.core.UrlTag) _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.UrlTag.class); boolean _jspx_th_c_005furl_005f0_reused = false; try { _jspx_th_c_005furl_005f0.setPageContext(_jspx_page_context); _jspx_th_c_005furl_005f0.setParent(null); // /WEB-INF/views/post.jsp(8,45) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005furl_005f0.setValue("/css/style.css"); int _jspx_eval_c_005furl_005f0 = _jspx_th_c_005furl_005f0.doStartTag(); if (_jspx_th_c_005furl_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.reuse(_jspx_th_c_005furl_005f0); _jspx_th_c_005furl_005f0_reused = true; } finally { if 
(!_jspx_th_c_005furl_005f0_reused) { _jspx_th_c_005furl_005f0.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005furl_005f0); } } return false; } private boolean _jspx_meth_c_005fimport_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:import org.apache.taglibs.standard.tag.rt.core.ImportTag _jspx_th_c_005fimport_005f0 = (org.apache.taglibs.standard.tag.rt.core.ImportTag) _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.ImportTag.class); boolean _jspx_th_c_005fimport_005f0_reused = false; try { _jspx_th_c_005fimport_005f0.setPageContext(_jspx_page_context); _jspx_th_c_005fimport_005f0.setParent(null); // /WEB-INF/views/post.jsp(15,1) name = url type = null reqTime = true required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005fimport_005f0.setUrl("menu.jsp"); int[] _jspx_push_body_count_c_005fimport_005f0 = new int[] { 0 }; try { int _jspx_eval_c_005fimport_005f0 = _jspx_th_c_005fimport_005f0.doStartTag(); if (_jspx_th_c_005fimport_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } } catch (java.lang.Throwable _jspx_exception) { while (_jspx_push_body_count_c_005fimport_005f0[0]-- > 0) out = _jspx_page_context.popBody(); _jspx_th_c_005fimport_005f0.doCatch(_jspx_exception); } finally { _jspx_th_c_005fimport_005f0.doFinally(); } _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody.reuse(_jspx_th_c_005fimport_005f0); _jspx_th_c_005fimport_005f0_reused = true; } finally { if (!_jspx_th_c_005fimport_005f0_reused) { _jspx_th_c_005fimport_005f0.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005fimport_005f0); } } return false; } private boolean _jspx_meth_c_005furl_005f1(javax.servlet.jsp.PageContext 
_jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:url org.apache.taglibs.standard.tag.rt.core.UrlTag _jspx_th_c_005furl_005f1 = (org.apache.taglibs.standard.tag.rt.core.UrlTag) _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.UrlTag.class); boolean _jspx_th_c_005furl_005f1_reused = false; try { _jspx_th_c_005furl_005f1.setPageContext(_jspx_page_context); _jspx_th_c_005furl_005f1.setParent(null); // /WEB-INF/views/post.jsp(23,23) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005furl_005f1.setValue((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("/autor/${postagem.autor.nome}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); int _jspx_eval_c_005furl_005f1 = _jspx_th_c_005furl_005f1.doStartTag(); if (_jspx_th_c_005furl_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.reuse(_jspx_th_c_005furl_005f1); _jspx_th_c_005furl_005f1_reused = true; } finally { if (!_jspx_th_c_005furl_005f1_reused) { _jspx_th_c_005furl_005f1.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005furl_005f1); } } return false; } private boolean _jspx_meth_fmt_005fparseDate_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:parseDate org.apache.taglibs.standard.tag.rt.fmt.ParseDateTag _jspx_th_fmt_005fparseDate_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.ParseDateTag) 
_005fjspx_005ftagPool_005ffmt_005fparseDate_0026_005fvar_005fvalue_005fpattern_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParseDateTag.class); boolean _jspx_th_fmt_005fparseDate_005f0_reused = false; try { _jspx_th_fmt_005fparseDate_005f0.setPageContext(_jspx_page_context); _jspx_th_fmt_005fparseDate_005f0.setParent(null); // /WEB-INF/views/post.jsp(24,5) name = var type = java.lang.String reqTime = false required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fparseDate_005f0.setVar("date"); // /WEB-INF/views/post.jsp(24,5) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fparseDate_005f0.setValue((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${postagem.dataPostagem}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); // /WEB-INF/views/post.jsp(24,5) name = pattern type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fparseDate_005f0.setPattern("yyyy-MM-dd'T'HH:mm:ss"); int _jspx_eval_fmt_005fparseDate_005f0 = _jspx_th_fmt_005fparseDate_005f0.doStartTag(); if (_jspx_th_fmt_005fparseDate_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005ffmt_005fparseDate_0026_005fvar_005fvalue_005fpattern_005fnobody.reuse(_jspx_th_fmt_005fparseDate_005f0); _jspx_th_fmt_005fparseDate_005f0_reused = true; } finally { if (!_jspx_th_fmt_005fparseDate_005f0_reused) { _jspx_th_fmt_005fparseDate_005f0.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_fmt_005fparseDate_005f0); } } return false; } private boolean _jspx_meth_fmt_005fformatDate_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws 
java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:formatDate org.apache.taglibs.standard.tag.rt.fmt.FormatDateTag _jspx_th_fmt_005fformatDate_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.FormatDateTag) _005fjspx_005ftagPool_005ffmt_005fformatDate_0026_005fvalue_005ftype_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.FormatDateTag.class); boolean _jspx_th_fmt_005fformatDate_005f0_reused = false; try { _jspx_th_fmt_005fformatDate_005f0.setPageContext(_jspx_page_context); _jspx_th_fmt_005fformatDate_005f0.setParent(null); // /WEB-INF/views/post.jsp(25,12) name = value type = null reqTime = true required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fformatDate_005f0.setValue((java.util.Date) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${date}", java.util.Date.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); // /WEB-INF/views/post.jsp(25,12) name = type type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fformatDate_005f0.setType("both"); int _jspx_eval_fmt_005fformatDate_005f0 = _jspx_th_fmt_005fformatDate_005f0.doStartTag(); if (_jspx_th_fmt_005fformatDate_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005ffmt_005fformatDate_0026_005fvalue_005ftype_005fnobody.reuse(_jspx_th_fmt_005fformatDate_005f0); _jspx_th_fmt_005fformatDate_005f0_reused = true; } finally { if (!_jspx_th_fmt_005fformatDate_005f0_reused) { _jspx_th_fmt_005fformatDate_005f0.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_fmt_005fformatDate_005f0); } } return false; } private boolean _jspx_meth_c_005fforEach_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws 
java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:forEach org.apache.taglibs.standard.tag.rt.core.ForEachTag _jspx_th_c_005fforEach_005f0 = (org.apache.taglibs.standard.tag.rt.core.ForEachTag) _005fjspx_005ftagPool_005fc_005fforEach_0026_005fvar_005fitems.get(org.apache.taglibs.standard.tag.rt.core.ForEachTag.class); boolean _jspx_th_c_005fforEach_005f0_reused = false; try { _jspx_th_c_005fforEach_005f0.setPageContext(_jspx_page_context); _jspx_th_c_005fforEach_005f0.setParent(null); // /WEB-INF/views/post.jsp(34,5) name = var type = java.lang.String reqTime = false required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005fforEach_005f0.setVar("c"); // /WEB-INF/views/post.jsp(34,5) name = items type = javax.el.ValueExpression reqTime = true required = false fragment = false deferredValue = true expectedTypeName = java.lang.Object deferredMethod = false methodSignature = null _jspx_th_c_005fforEach_005f0.setItems(new org.apache.jasper.el.JspValueExpression("/WEB-INF/views/post.jsp(34,5) '${postagem.categorias}'",_jsp_getExpressionFactory().createValueExpression(_jspx_page_context.getELContext(),"${postagem.categorias}",java.lang.Object.class)).getValue(_jspx_page_context.getELContext())); int[] _jspx_push_body_count_c_005fforEach_005f0 = new int[] { 0 }; try { int _jspx_eval_c_005fforEach_005f0 = _jspx_th_c_005fforEach_005f0.doStartTag(); if (_jspx_eval_c_005fforEach_005f0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) { do { out.write("\r\n"); out.write("\t\t\t\t\t\t<a href=\""); if (_jspx_meth_c_005furl_005f2(_jspx_th_c_005fforEach_005f0, _jspx_page_context, _jspx_push_body_count_c_005fforEach_005f0)) return true; out.write("\" title=\""); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${c.descricao}", java.lang.String.class, 
(javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write("\">\r\n"); out.write("\t\t\t\t\t\t "); out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${c.descricao}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); out.write(" </a>\r\n"); out.write("\t\t\t\t\t"); int evalDoAfterBody = _jspx_th_c_005fforEach_005f0.doAfterBody(); if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN) break; } while (true); } if (_jspx_th_c_005fforEach_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } } catch (java.lang.Throwable _jspx_exception) { while (_jspx_push_body_count_c_005fforEach_005f0[0]-- > 0) out = _jspx_page_context.popBody(); _jspx_th_c_005fforEach_005f0.doCatch(_jspx_exception); } finally { _jspx_th_c_005fforEach_005f0.doFinally(); } _005fjspx_005ftagPool_005fc_005fforEach_0026_005fvar_005fitems.reuse(_jspx_th_c_005fforEach_005f0); _jspx_th_c_005fforEach_005f0_reused = true; } finally { if (!_jspx_th_c_005fforEach_005f0_reused) { _jspx_th_c_005fforEach_005f0.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005fforEach_005f0); } } return false; } private boolean _jspx_meth_c_005furl_005f2(javax.servlet.jsp.tagext.JspTag _jspx_th_c_005fforEach_005f0, javax.servlet.jsp.PageContext _jspx_page_context, int[] _jspx_push_body_count_c_005fforEach_005f0) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:url org.apache.taglibs.standard.tag.rt.core.UrlTag _jspx_th_c_005furl_005f2 = (org.apache.taglibs.standard.tag.rt.core.UrlTag) _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.UrlTag.class); boolean _jspx_th_c_005furl_005f2_reused = false; try { _jspx_th_c_005furl_005f2.setPageContext(_jspx_page_context); _jspx_th_c_005furl_005f2.setParent((javax.servlet.jsp.tagext.Tag) 
_jspx_th_c_005fforEach_005f0); // /WEB-INF/views/post.jsp(35,15) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005furl_005f2.setValue((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("/categoria/${c.permalink}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, null)); int _jspx_eval_c_005furl_005f2 = _jspx_th_c_005furl_005f2.doStartTag(); if (_jspx_th_c_005furl_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.reuse(_jspx_th_c_005furl_005f2); _jspx_th_c_005furl_005f2_reused = true; } finally { if (!_jspx_th_c_005furl_005f2_reused) { _jspx_th_c_005furl_005f2.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005furl_005f2); } } return false; } private boolean _jspx_meth_c_005furl_005f3(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:url org.apache.taglibs.standard.tag.rt.core.UrlTag _jspx_th_c_005furl_005f3 = (org.apache.taglibs.standard.tag.rt.core.UrlTag) _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.UrlTag.class); boolean _jspx_th_c_005furl_005f3_reused = false; try { _jspx_th_c_005furl_005f3.setPageContext(_jspx_page_context); _jspx_th_c_005furl_005f3.setParent(null); // /WEB-INF/views/post.jsp(41,34) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005furl_005f3.setValue((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("/avatar/load/${postagem.autor.usuario.avatar.id}", java.lang.String.class, 
(javax.servlet.jsp.PageContext)_jspx_page_context, null)); int _jspx_eval_c_005furl_005f3 = _jspx_th_c_005furl_005f3.doStartTag(); if (_jspx_th_c_005furl_005f3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } _005fjspx_005ftagPool_005fc_005furl_0026_005fvalue_005fnobody.reuse(_jspx_th_c_005furl_005f3); _jspx_th_c_005furl_005f3_reused = true; } finally { if (!_jspx_th_c_005furl_005f3_reused) { _jspx_th_c_005furl_005f3.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005furl_005f3); } } return false; } private boolean _jspx_meth_c_005fimport_005f1(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // c:import org.apache.taglibs.standard.tag.rt.core.ImportTag _jspx_th_c_005fimport_005f1 = (org.apache.taglibs.standard.tag.rt.core.ImportTag) _005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.ImportTag.class); boolean _jspx_th_c_005fimport_005f1_reused = false; try { _jspx_th_c_005fimport_005f1.setPageContext(_jspx_page_context); _jspx_th_c_005fimport_005f1.setParent(null); // /WEB-INF/views/post.jsp(46,2) name = url type = null reqTime = true required = true fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_c_005fimport_005f1.setUrl("comments.jsp"); int[] _jspx_push_body_count_c_005fimport_005f1 = new int[] { 0 }; try { int _jspx_eval_c_005fimport_005f1 = _jspx_th_c_005fimport_005f1.doStartTag(); if (_jspx_th_c_005fimport_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { return true; } } catch (java.lang.Throwable _jspx_exception) { while (_jspx_push_body_count_c_005fimport_005f1[0]-- > 0) out = _jspx_page_context.popBody(); _jspx_th_c_005fimport_005f1.doCatch(_jspx_exception); } finally { _jspx_th_c_005fimport_005f1.doFinally(); } 
_005fjspx_005ftagPool_005fc_005fimport_0026_005furl_005fnobody.reuse(_jspx_th_c_005fimport_005f1); _jspx_th_c_005fimport_005f1_reused = true; } finally { if (!_jspx_th_c_005fimport_005f1_reused) { _jspx_th_c_005fimport_005f1.release(); _jsp_getInstanceManager().destroyInstance(_jspx_th_c_005fimport_005f1); } } return false; } }
/* * $Id$ */ /* Copyright (c) 2000-2015 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
*/

package org.lockss.plugin.atypon;

import java.util.Arrays;
import java.util.Iterator;
import java.util.regex.Pattern;

import org.lockss.config.TdbAu;
import org.lockss.daemon.*;
import org.lockss.extractor.ArticleMetadataExtractor;
import org.lockss.extractor.ArticleMetadataExtractorFactory;
import org.lockss.extractor.BaseArticleMetadataExtractor;
import org.lockss.extractor.MetadataTarget;
import org.lockss.plugin.*;
import org.lockss.util.Logger;

/**
 * Article iterator (and article-metadata-extractor) factory shared by the
 * Atypon-platform plugins.
 *
 * On an Atypon site the aspects of one article hang off a common DOI stem,
 * though any given journal may only expose a subset of them:
 *
 *   &lt;base&gt;/doi/abs/10.3366/drs.2011.0010      (abstract or summary)
 *   &lt;base&gt;/doi/full/10.3366/drs.2011.0010     (full-text HTML)
 *   &lt;base&gt;/doi/pdf/10.3366/drs.2011.0010      (full-text PDF)
 *   &lt;base&gt;/doi/pdfplus/10.3366/drs.2011.0010  (decorated PDF, possibly framed)
 *   &lt;base&gt;/doi/suppl/10.3366/drs.2011.0010    (supplementary-material page)
 *   &lt;base&gt;/doi/ref/10.3366/drs.2011.0010      (references page)
 *
 * and a normalized citation download looks like:
 *   &lt;base&gt;/action/downloadCitation?doi=&lt;one&gt;%2F&lt;two&gt;&amp;format=ris&amp;include=cit
 *
 * NOTE: a DOI suffix may itself contain "/" (e.g. T&amp;F Writing Systems
 * Research, /doi/pdfplus/10.1093/wsr/wsr0023). The query-style replacements
 * (showSupplements, downloadCitation) do not percent-encode such embedded
 * slashes, so those aspects will not match for slashed suffixes until the
 * builder supports smarter replacement.
 */
public class BaseAtyponArticleIteratorFactory
    implements ArticleIteratorFactory, ArticleMetadataExtractorFactory {

  protected static Logger log =
      Logger.getLogger(BaseAtyponArticleIteratorFactory.class);

  // Coverage-depth string the daemon uses to mark abstracts-only AUs.
  private static final String ABSTRACTS_ONLY = "abstracts";
  private static final String ROLE_PDFPLUS = "PdfPlus";

  private static final String ROOT_TEMPLATE = "\"%sdoi/\", base_url";
  // Only include "abs" in the primary pattern when abstracts may trigger an
  // ArticleFiles; otherwise the builder logs errors for the unused aspect.
  private static final String PATTERN_TEMPLATE_WITH_ABSTRACT =
      "\"^%sdoi/(abs|full|pdf|pdfplus)/[.0-9]+/\", base_url";
  private static final String PATTERN_TEMPLATE =
      "\"^%sdoi/(full|pdf|pdfplus)/[.0-9]+/\", base_url";

  // One pattern per article aspect. DOI suffixes may contain "/", hence the
  // permissive ([^?&]+) second group.
  private static final Pattern PDF_PATTERN =
      Pattern.compile("/doi/pdf/([.0-9]+)/([^?&]+)$", Pattern.CASE_INSENSITIVE);
  private static final Pattern ABSTRACT_PATTERN =
      Pattern.compile("/doi/abs/([.0-9]+)/([^?&]+)$", Pattern.CASE_INSENSITIVE);
  private static final Pattern HTML_PATTERN =
      Pattern.compile("/doi/full/([.0-9]+)/([^?&]+)$", Pattern.CASE_INSENSITIVE);
  private static final Pattern PDFPLUS_PATTERN =
      Pattern.compile("/doi/pdfplus/([.0-9]+)/([^?&]+)$", Pattern.CASE_INSENSITIVE);

  // Rewrites from one aspect of an article to another.
  private static final String HTML_REPLACEMENT = "/doi/full/$1/$2";
  private static final String ABSTRACT_REPLACEMENT = "/doi/abs/$1/$2";
  private static final String PDF_REPLACEMENT = "/doi/pdf/$1/$2";
  private static final String PDFPLUS_REPLACEMENT = "/doi/pdfplus/$1/$2";
  // For books: the equivalent of a full-book abstract (landing page).
  private static final String BOOK_REPLACEMENT = "/doi/book/$1/$2";

  // Support material — never sufficient on its own to trigger an ArticleFiles.
  private static final String REFERENCES_REPLACEMENT = "/doi/ref/$1/$2";
  private static final String SUPPL_REPLACEMENT = "/doi/suppl/$1/$2";
  // MassMedical delivers supplements through this action URL instead;
  // the link extractor picks these up from forms.
  private static final String SECOND_SUPPL_REPLACEMENT =
      "/action/showSupplements?doi=$1%2F$2";

  // After normalization the RIS citation lives at this URL, if it exists.
  private static final String RIS_REPLACEMENT =
      "/action/downloadCitation?doi=$1%2F$2&format=ris&include=cit";
  // AMetSoc doesn't offer include=cit, only include=abs. Kept as a separate
  // pattern (not OR'd) so the cit form keeps priority.
  private static final String SECOND_RIS_REPLACEMENT =
      "/action/downloadCitation?doi=$1%2F$2&format=ris&include=abs";

  /**
   * Builds the iterator over the article aspects of the given AU.
   *
   * Aspect registration order matters: the first matching aspect is what
   * triggers an ArticleFiles, and when the target is only counting articles
   * (not extracting metadata) the later aspects are never even probed.
   *
   * @param au     the archival unit being iterated
   * @param target the metadata target driving the crawl
   * @return iterator over the ArticleFiles discovered in the AU
   * @throws PluginException on builder/spec failure
   */
  @Override
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au,
                                                      MetadataTarget target)
      throws PluginException {
    SubTreeArticleIteratorBuilder builder = localBuilderCreator(au);

    String patternTemplate = isAbstractOnly(au)
        ? PATTERN_TEMPLATE_WITH_ABSTRACT
        : PATTERN_TEMPLATE;
    builder.setSpec(target, ROOT_TEMPLATE, patternTemplate,
                    Pattern.CASE_INSENSITIVE);

    // Full-text aspects — each of these can trigger an ArticleFiles.
    builder.addAspect(PDF_PATTERN,
                      PDF_REPLACEMENT,
                      ArticleFiles.ROLE_FULL_TEXT_PDF);
    builder.addAspect(PDFPLUS_PATTERN,
                      PDFPLUS_REPLACEMENT,
                      ROLE_PDFPLUS);
    builder.addAspect(HTML_PATTERN,
                      HTML_REPLACEMENT,
                      ArticleFiles.ROLE_FULL_TEXT_HTML,
                      ArticleFiles.ROLE_ARTICLE_METADATA); // metadata fallback

    if (isAbstractOnly(au)) {
      // Abstracts-only AU: an abstract alone triggers an ArticleFiles and,
      // until this is deprecated, may be treated as a FULL_TEXT_CU.
      builder.addAspect(ABSTRACT_PATTERN,
                        ABSTRACT_REPLACEMENT,
                        ArticleFiles.ROLE_ABSTRACT,
                        ArticleFiles.ROLE_FULL_TEXT_HTML,
                        ArticleFiles.ROLE_ARTICLE_METADATA);
    } else {
      // Otherwise an abstract only fills roles; it never triggers.
      builder.addAspect(ABSTRACT_REPLACEMENT,
                        ArticleFiles.ROLE_ABSTRACT,
                        ArticleFiles.ROLE_ARTICLE_METADATA);
    }

    // Role-only aspects — never sufficient to trigger an ArticleFiles.
    builder.addAspect(BOOK_REPLACEMENT,
                      ArticleFiles.ROLE_ABSTRACT);
    builder.addAspect(REFERENCES_REPLACEMENT,
                      ArticleFiles.ROLE_REFERENCES);
    builder.addAspect(Arrays.asList(SUPPL_REPLACEMENT,
                                    SECOND_SUPPL_REPLACEMENT),
                      ArticleFiles.ROLE_SUPPLEMENTARY_MATERIALS);
    // Priority: &include=cit first, then AMetSoc's &include=abs.
    builder.addAspect(Arrays.asList(RIS_REPLACEMENT,
                                    SECOND_RIS_REPLACEMENT),
                      ArticleFiles.ROLE_CITATION_RIS);

    // Priority order for full_text_cu: first existing role wins. For AUs that
    // are (partially) abstracts-only, the abstract is a last-resort option.
    if (isAbstractOnly(au)) {
      builder.setFullTextFromRoles(ArticleFiles.ROLE_FULL_TEXT_PDF,
                                   ArticleFiles.ROLE_FULL_TEXT_HTML,
                                   ROLE_PDFPLUS,
                                   ArticleFiles.ROLE_ABSTRACT);
    } else {
      builder.setFullTextFromRoles(ArticleFiles.ROLE_FULL_TEXT_PDF,
                                   ArticleFiles.ROLE_FULL_TEXT_HTML,
                                   ROLE_PDFPLUS);
    }

    // A lone pdfplus should still count as the full-text PDF.
    builder.setRoleFromOtherRoles(ArticleFiles.ROLE_FULL_TEXT_PDF,
                                  ArticleFiles.ROLE_FULL_TEXT_PDF,
                                  ROLE_PDFPLUS);
    // ROLE_ARTICLE_METADATA: first of RIS citation, abstract, full-text HTML.
    builder.setRoleFromOtherRoles(ArticleFiles.ROLE_ARTICLE_METADATA,
                                  ArticleFiles.ROLE_CITATION_RIS,
                                  ArticleFiles.ROLE_ABSTRACT,
                                  ArticleFiles.ROLE_FULL_TEXT_HTML);

    return builder.getSubTreeArticleIterator();
  }

  /**
   * Hook enclosing builder creation so a child (e.g. Taylor &amp; Francis)
   * can substitute a specialized builder; the base returns a plain one.
   */
  protected SubTreeArticleIteratorBuilder localBuilderCreator(ArchivalUnit au) {
    return new SubTreeArticleIteratorBuilder(au);
  }

  @Override
  public ArticleMetadataExtractor createArticleMetadataExtractor(
      MetadataTarget target) throws PluginException {
    return new BaseArticleMetadataExtractor(ArticleFiles.ROLE_ARTICLE_METADATA);
  }

  /** True when the AU's tdb entry marks its coverage depth as "abstracts". */
  private static boolean isAbstractOnly(ArchivalUnit au) {
    TdbAu tdbAu = au.getTdbAu();
    return tdbAu != null && ABSTRACTS_ONLY.equals(tdbAu.getCoverageDepth());
  }
}
package com.dke.pursuitevasion.Entities; import com.badlogic.ashley.core.Entity; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.*; import com.badlogic.gdx.graphics.g3d.Material; import com.badlogic.gdx.graphics.g3d.Model; import com.badlogic.gdx.graphics.g3d.ModelInstance; import com.badlogic.gdx.graphics.g3d.attributes.ColorAttribute; import com.badlogic.gdx.graphics.g3d.attributes.TextureAttribute; import com.badlogic.gdx.graphics.g3d.utils.ModelBuilder; import com.badlogic.gdx.math.Quaternion; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.dke.pursuitevasion.CXSearchingAlgorithm.CXAgentState; import com.dke.pursuitevasion.EdgeVectors; import com.dke.pursuitevasion.Entities.Components.*; import com.dke.pursuitevasion.Entities.Components.agents.CCTvComponent; import com.dke.pursuitevasion.Entities.Components.agents.EvaderComponent; import com.dke.pursuitevasion.Entities.Components.agents.PursuerComponent; import com.dke.pursuitevasion.WallInfo; import static com.badlogic.gdx.graphics.GL20.GL_TRIANGLES; /** * Created by Nicola Gheza on 20/03/2017. 
*/ public class EntityFactory { static public int pursuerCounter = 0; static EntityFactory instance; static EntityFactory getInstance() { if (instance != null) return instance; return instance = new EntityFactory(); } public Entity createAgent(Vector3 position, Color color) { Entity entity = new Entity(); StateComponent transformComponent = new StateComponent(); transformComponent.position = position; transformComponent.orientation = new Quaternion(position,0); transformComponent.update(); entity.add(transformComponent); // Adding ObserverComponent for VisionSystem ObserverComponent observerComponent = new ObserverComponent(); observerComponent.position = new Vector2(transformComponent.position.x, transformComponent.position.y); entity.add(observerComponent); //Create a sphere collider component EvaderComponent sphereColliderComponent = new EvaderComponent(); sphereColliderComponent.radius = 0.15f; entity.add(sphereColliderComponent); ModelBuilder modelBuilder = new ModelBuilder(); Model model = modelBuilder.createSphere(0.15f, 0.15f, 0.15f, 20, 20, new Material(ColorAttribute.createDiffuse(color)), VertexAttributes.Usage.Position | VertexAttributes.Usage.Normal); ModelInstance agentModel = new ModelInstance(model); GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.modelInstance = agentModel; entity.add(graphicsComponent); VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } public Entity createPursuer(Vector3 position, Color color, float visionDist) { Entity entity = new Entity(); StateComponent transformComponent = new StateComponent(); transformComponent.position = position; transformComponent.orientation = new Quaternion(position,0); transformComponent.update(); entity.add(transformComponent); // Adding ObserverComponent for VisionSystem ObserverComponent observerComponent = new ObserverComponent(); observerComponent.position = new Vector2(transformComponent.position.x, 
transformComponent.position.y); observerComponent.fovAngle = 360f; observerComponent.distance = visionDist; entity.add(observerComponent); // Adding ObserverComponent for VisionSystem ObservableComponent observableComponent = new ObservableComponent(); observableComponent.position = new Vector2(transformComponent.position.x, transformComponent.position.y); entity.add(observableComponent); PursuerComponent pursuerComponent = new PursuerComponent(); pursuerComponent.position = position; pursuerComponent.number = pursuerCounter++; pursuerComponent.setState(CXAgentState.Free); entity.add(pursuerComponent); ModelBuilder modelBuilder = new ModelBuilder(); Model model = modelBuilder.createSphere(0.15f, 0.15f, 0.15f, 20, 20, new Material(ColorAttribute.createDiffuse(color)), VertexAttributes.Usage.Position | VertexAttributes.Usage.Normal); ModelInstance agentModel = new ModelInstance(model); GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.modelInstance = agentModel; entity.add(graphicsComponent); VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } public Entity createCCTv(Vector3 position) { Entity entity = new Entity(); StateComponent transformComponent = new StateComponent(); transformComponent.position = position; transformComponent.orientation = new Quaternion(position,0); transformComponent.update(); entity.add(transformComponent); // Adding ObserverComponent for VisionSystem ObserverComponent observerComponent = new ObserverComponent(); observerComponent.position = new Vector2(transformComponent.position.x, transformComponent.position.y); observerComponent.distance = 4.5f; entity.add(observerComponent); CCTvComponent cctv = new CCTvComponent(); entity.add(cctv); ModelBuilder modelBuilder = new ModelBuilder(); Model model = modelBuilder.createSphere(0.15f, 0.15f, 0.15f, 20, 20, new Material(ColorAttribute.createDiffuse(Color.BLACK)), VertexAttributes.Usage.Position | 
VertexAttributes.Usage.Normal); ModelInstance agentModel = new ModelInstance(model); GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.modelInstance = agentModel; entity.add(graphicsComponent); VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } public Entity createEvader(Vector3 position, Color color) { Entity entity = new Entity(); StateComponent transformComponent = new StateComponent(); transformComponent.position = position; transformComponent.orientation = new Quaternion(position,0); transformComponent.update(); entity.add(transformComponent); // Adding ObserverComponent for VisionSystem ObservableComponent observableComponent = new ObservableComponent(); observableComponent.position = new Vector2(transformComponent.position.x, transformComponent.position.y); entity.add(observableComponent); // Adding ObserverComponent for VisionSystem ObserverComponent observerComponent = new ObserverComponent(); observerComponent.position = new Vector2(transformComponent.position.x, transformComponent.position.y); observerComponent.fovAngle = 0f; observerComponent.distance = 0f; entity.add(observerComponent); //Create a sphere collider component EvaderComponent sphereColliderComponent = new EvaderComponent(); sphereColliderComponent.radius = 0.15f; sphereColliderComponent.position = position; entity.add(sphereColliderComponent); ModelBuilder modelBuilder = new ModelBuilder(); Model model = modelBuilder.createSphere(0.15f, 0.15f, 0.15f, 20, 20, new Material(ColorAttribute.createDiffuse(color)), VertexAttributes.Usage.Position | VertexAttributes.Usage.Normal); ModelInstance agentModel = new ModelInstance(model); GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.modelInstance = agentModel; entity.add(graphicsComponent); VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } public Entity createTerrain(Mesh 
mesh, EdgeVectors[] edgeVectors) { Entity entity = new Entity(); FileHandle img = Gdx.files.internal("blueprint-1.jpg"); Texture texture = new Texture(img, Pixmap.Format.RGB565, false); texture.setWrap(Texture.TextureWrap.Repeat, Texture.TextureWrap.Repeat); texture.setFilter(Texture.TextureFilter.Linear, Texture.TextureFilter.Linear); StateComponent transformComponent = new StateComponent(); transformComponent.position = new Vector3(); transformComponent.orientation = new Quaternion(new Vector3(0, 0, 0), 0); entity.add(transformComponent); ModelBuilder modelBuilder = new ModelBuilder(); modelBuilder.begin(); modelBuilder.part("1", mesh, GL_TRIANGLES, new Material(new TextureAttribute(TextureAttribute.Diffuse, texture))); Model model = modelBuilder.end(); //ModelInstance polygonModel = new ModelInstance(model); //Add model to the entity GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.mesh = mesh; entity.add(graphicsComponent); //Make it visible VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } public Entity createBoundary(EdgeVectors eV) { Entity entity = new Entity(); WallComponent wallComponent = new WallComponent(); wallComponent.eV = eV; entity.add(wallComponent); return entity; } public Entity createWall(WallInfo wallInfo) { Entity entity = new Entity(); StateComponent transformComponent = new StateComponent(); transformComponent.transform.setToTranslation(wallInfo.position); transformComponent.transform.rotateRad(new Vector3(0, 1, 0), wallInfo.rotAngle); transformComponent.autoTransformUpdate = false; entity.add(transformComponent); Color wallColor = new Color(156,229,251,0); ModelBuilder modelBuilder = new ModelBuilder(); Model wall = modelBuilder.createBox(wallInfo.length-0.05f,wallInfo.height,0.08f,new Material(ColorAttribute.createDiffuse(wallColor)), VertexAttributes.Usage.Position | VertexAttributes.Usage.Normal); ModelInstance wallInstance = new 
ModelInstance(wall); wallInstance.transform = transformComponent.transform; WallComponent wallComponent = new WallComponent(); wallComponent.eV = new EdgeVectors(wallInfo.start, wallInfo.end); wallComponent.innerWall = true; entity.add(wallComponent); GraphicsComponent graphicsComponent = new GraphicsComponent(); graphicsComponent.modelInstance = wallInstance; entity.add(graphicsComponent); VisibleComponent visibleComponent = new VisibleComponent(); entity.add(visibleComponent); return entity; } }
package org.usfirst.frc2811.RecycleRush.subsystems;

import org.usfirst.frc2811.RecycleRush.Robot;

import edu.wpi.first.wpilibj.CANTalon;
import edu.wpi.first.wpilibj.command.Subsystem;

/**
 * Base class for Talon SRX driven, setpoint-seeking lift subsystems.
 * Holds the motor handle, the inches-to-encoder-ticks calibration values, and
 * the homing / soft-limit bookkeeping shared by the concrete subsystems.
 * Child classes supply the real motor via useMotor() and a name via setName().
 */
public class TalonSRXPIDBase extends Subsystem {

    //FIXME: This should not be public. hacked fix
    public CANTalon talonSRX;

    /**
     * Used to pass a particular motor object to the base class
     */
    protected void useMotor(CANTalon motor){
        talonSRX=motor;
    }

    /**
     * This is used in internal printlines and status messages
     */
    protected String systemName="TalonSRX";

    protected void setName(String name){
        systemName=name;
    }

    /**
     * The value (in inches) that the system will attempt to set itself to.
     */
    protected double setpoint;

    /** Indicate whether the setTickVelocity function will reverse the sign to invert the motor direction */
    private boolean reverseMotorDirection=false;

    /**
     * This is the very bottom limit of the switch, set when the system is homed correctly.
     * This is used by the math functions to zero the encoder ticks.
     */
    private double tickValueOfBottomLimitSwitch=0;

    /*
     * These are helper values, used in calibrating the conversion from encoder
     * ticks to inches for the robot's current height.
     * The default values should never be used, but are provided to avoid potential errors.
     */
    private double inchesBottom=0;
    private double inchesTop=10;
    private double ticksBottom=-100;
    private double ticksTop=100;

    // Soft-stop configuration; disabled until setSoftLimits() is called.
    private boolean softLimitsEnabled=false;
    private double softStopInchesBottom=-100;
    private double softStopInchesTop=100;

    // True once setHomedState(true) has recorded the bottom-limit encoder zero.
    private boolean homingStatus=false;

    /**
     * This is used to enable or disable the {name}Monitor command.
     * use enable() or disable() to set it.
     */
    private boolean heightMonitoringEnabled=true;

    // Put methods for controlling this subsystem
    // here. Call these from Commands.
public void initDefaultCommand() { //Don't use this: It requires requires(), which screws up other stuff //setDefaultCommand(new BidentMonitor()); //To ensure we don't cause runtime errors, we create a fake CanTALON, just in case. //This will be overridden by the child class talonSRX=new CANTalon(0); } /** SetRange: Used to calibrate the inches-to-range conversion for helpful utility functions * This is used to calibrate the TicksTo Inches and Inches * @param inchesBottom Bottom limit of the robot's actuator (in inches) * @param inchesTop Top limit of the robot's actuator (in inches) * @param ticksBottom Bottom limit of the robot's actuator (in encoder ticks) * @param ticksTop Bottom limit of the robot's actuator (in encoder ticks) */ public void setRange(double InchesBottom, double InchesTop,double TicksBottom, double TicksTop){ inchesBottom=InchesBottom; inchesTop=InchesTop; ticksBottom=TicksBottom; ticksTop=TicksTop; } public void setSoftLimits(double InchesBottom, double InchesTop){ softStopInchesBottom=InchesBottom; softStopInchesTop=InchesTop; softLimitsEnabled=true; } /** * Use this function to declare that the system is fully homed * @param homingStatus */ public void setHomedState(boolean _homingStatus){ homingStatus = _homingStatus; System.out.println("STATUS::"+systemName+":: Homing State now set to " +homingStatus); tickValueOfBottomLimitSwitch=talonSRX.getPosition(); } /**Returns true if the system has been homed * @return */ public boolean isHomed(){ return homingStatus; } /** Sets the velocity to match the current setpoint * Intended to be called repeatedly from a command */ public void update(){ double difference=Math.abs(ticksToInches(talonSRX.getEncPosition())-setpoint); double upSpeedSlow=2; double upSpeedFast=5; double downSpeedSlow=2; double downSpeedFast=5; boolean error=false; //Do some error checking to prevent hardships if(homingStatus==false){ System.out.println("ERROR::"+systemName+"::System not homed; Home before using update"); 
error=true; } if(homingStatus && getHeight()>inchesTop+1){ System.out.println("ERROR::"+systemName+"::Current height above robot limit"); error=true; } if(homingStatus && getHeight()<inchesBottom-1){ System.out.println("ERROR::"+systemName+"::Current height below robot limit"); error=true; } if(isBottomSwitchPressed(false)){ //System.out.println("WARNING::"+systemName+"::Current height at lower limit"); } if(isTopSwitchPressed(false)){ //System.out.println("WARNING::"+systemName+"::Current height at upper limit"); } if(error){ return; } /* * Set differential speeds depending on where we're trying to go * This is the hard-and-fast approach, but we can set various breakpoints as needed */ if(difference<0.5){ stop(false); //stop, but leave the I term to ensure we don't drop things } else if(getHeight()<setpoint && difference<2){ up(upSpeedSlow); } else if(getHeight()<setpoint){ up(upSpeedFast); } else if(getHeight()>setpoint && difference<2){ down(downSpeedSlow); } else if(getHeight()>setpoint){ down(downSpeedFast); } else{ stop(); System.out.println("ERROR::"+systemName+"::Height is nonsensical"); } } /** * Set the robot velocity in raw ticks. This should be used by every function in this class, but not used externally. * @param ticks travelled every 10 ms. */ public void setTickVelocity(double ticks){ if (reverseMotorDirection)ticks= -ticks; talonSRX.set(ticks); } /**Use this function to reverse the motor direction without fiddling with the Talon * @param reversed */ public void reverseMotorDirection(boolean reversed){ reverseMotorDirection=reversed; } /**Moves the system up at a given rate. 
* @param inchesPerSecond A positive value means up, a negative value means go down */ public void up(double inchesPerSecond){ //TODO: Test this, then use them exclusively when setting directions to the robot double ticksPerSecond=inchesPerSecond*ticksPerInch()/100; //set(speed) requires units of ticks/10ms, setTickVelocity(ticksPerSecond); } public void up(){ up(5); } /**Moves the system downward at a given rate. * @param inchesPerSecond A positive value means up, a negative value means go down */ public void down(double inchesPerSecond){ up(-inchesPerSecond); //Shorthand, save typing by making Up do everything } public void down(){ down(5); } public void stop(boolean clearIAccumulation){ talonSRX.set(0); if(clearIAccumulation)talonSRX.ClearIaccum(); } public void stop(){ stop(true); } public void disable(){ heightMonitoringEnabled=false; stop(false);//This seems like a logical thing to add, but maybe not } public void enable(){ heightMonitoringEnabled=true; stop(false); setHeight(getHeight()); } public boolean isEnabled() { return heightMonitoringEnabled; } public double getTargetHeight(){ return setpoint; } public double getHeight(){ //TODO: Test function for correctness //return ticksToInches(talonSRX.getPosition()-tickValueOfBottomLimitSwitch); return -1; } public void setHeight(double inches){ setpoint=inches; } public boolean isOnTarget(double tolerance){ double difference=Math.abs(getHeight()-setpoint); return difference<Math.abs(tolerance); } public boolean isOnTarget(){ return isOnTarget(1); } public boolean isBottomSwitchPressed(boolean includeSoftLimits){ /* Note, the change in nomenclature between Bottom and Reverse: * Reverse is relative to the motor, which we (as a general rule) don't care about. 
* Bottom ensures that we always grab the intended switch elsewhere in our code */ if(softLimitsEnabled && includeSoftLimits && homingStatus==true){ return getHeight()<inchesBottom; } return talonSRX.isRevLimitSwitchClosed(); //TODO: Ensure that this is in fact the bottom switch } public boolean isBottomSwitchPressed(){ return isBottomSwitchPressed(true); } public boolean isTopSwitchPressed(boolean includeSoftLimits){ if(softLimitsEnabled && includeSoftLimits && homingStatus==true){ return getHeight()>inchesTop; } return talonSRX.isFwdLimitSwitchClosed();//TODO: Ensure that this is in fact the top switch } public boolean isTopSwitchPressed(){ return isTopSwitchPressed(true); } public void printStatus(){ //System.out.printf("Switches :: Homed? (%5b) Top(%5b) Bottom(%5b) onT($5b)\n",homingStatus,isTopSwitchPressed(false),isBottomSwitchPressed(false),isOnTarget()); //System.out.printf("Inches :: Current(%5f) Top(%5f) Bottom(%5f) Set(%5f)\n",getHeight(),inchesTop,inchesBottom, setpoint); //System.out.printf("Ticks :: Current(%5f) Top(%5f) Bottom(%5f) Set(%5f)\n",bident.getEncPosition(),ticksTop-tickValueOfBottomLimitSwitch,inchesBottom-tickValueOfBottomLimitSwitch, inchesToTicks(setpoint)); if(softLimitsEnabled){ //System.out.printf("SoftStops:: Current(%5f) Top(%5f) Bottom(%5f) TopHit(%5d) BotHit(%5d)\b",getHeight(),softStopInchesTop,softStopInchesTop, isTopSwitchPressed(true),isBottomSwitchPressed(true)); } } /* * Math utility functions to simplify conversions * TODO: Test utility functions for correctness */ /** * @return Number of ticks corresponding to 1 inch height */ public double ticksPerInch(){ return Math.abs((ticksBottom-ticksTop)/(inchesBottom-inchesTop)); } /** * @return number of inches in 1 encoder tick */ public double inchesPerTick(){ return Math.abs((inchesBottom-inchesTop)/(ticksBottom-ticksTop)); } /**Uses Map to convert data using values set in setRange * @param inches * @return */ public double ticksToInches(double ticks){ return 
map(ticks,ticksBottom-tickValueOfBottomLimitSwitch,ticksTop-tickValueOfBottomLimitSwitch,inchesBottom,inchesTop); } /**Shortcut to convert inches to ticks, using values defined with setRange * @param inches * @return encoder ticks corresponding to that height */ public double inchesToTicks(double inches){ return map(inches,inchesBottom,inchesTop,ticksBottom-tickValueOfBottomLimitSwitch,ticksTop-tickValueOfBottomLimitSwitch); } /** * Converts from one data range to another. * @param input * @param maximum * @param minimum * @param outputMax * @param outputMin * @return */ protected double map( double input, double maximum, double minimum, double outputMax, double outputMin){ double output = (input/(maximum-minimum)-minimum/(maximum-minimum))*(outputMax-outputMin)+outputMin; if (output==Double.NaN){ output=minimum;//Shouldn't happen unless we divide by zero somewhere } return output; } /** * Helper function to restrict a particular value to a given range * @param value * @param min: Minimum value that will be returned * @param max: Maximum value that will be returned * @return Unmodified value if it's within min and max, otherwise return min or max value */ protected double constrain(double value, double min, double max){ if(value>max)value=max; if(value<min)value=min; return value; } }
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.eureka;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.InstanceInfo.ActionType;
import com.netflix.discovery.EurekaIdentityHeaderFilter;
import com.netflix.discovery.TimedSupervisorTask;
import com.netflix.discovery.shared.Application;
import com.netflix.discovery.shared.Applications;
import com.netflix.discovery.shared.EurekaJerseyClient;
import com.netflix.discovery.shared.EurekaJerseyClientImpl.EurekaJerseyClientBuilder;
import com.netflix.discovery.shared.LookupService;
import com.netflix.servo.monitor.Monitors;
import com.netflix.servo.monitor.Stopwatch;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.filter.GZIPContentEncodingFilter;
import com.sun.jersey.client.apache4.ApacheHttpClient4;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Handles all registry operations that needs to be done on a eureka service running in an other region.
 *
 * The primary operations include fetching registry information from remote region and fetching delta information
 * on a periodic basis.
 *
 * @author Karthik Ranganathan
 *
 */
public class RemoteRegionRegistry implements LookupService<String> {

    private static final Logger logger = LoggerFactory.getLogger(RemoteRegionRegistry.class);
    private static final EurekaServerConfig EUREKA_SERVER_CONFIG =
            EurekaServerConfigurationManager.getInstance().getConfiguration();

    private final ApacheHttpClient4 discoveryApacheClient;
    private final EurekaJerseyClient discoveryJerseyClient;
    private final com.netflix.servo.monitor.Timer fetchRegistryTimer;
    private final URL remoteRegionURL;

    private final ScheduledExecutorService scheduler;
    // monotonically increasing generation counter to ensure stale threads do not reset registry to an older version
    private final AtomicLong fullRegistryGeneration = new AtomicLong(0);
    private final AtomicLong deltaGeneration = new AtomicLong(0);

    private final AtomicReference<Applications> applications = new AtomicReference<Applications>();
    private final AtomicReference<Applications> applicationsDelta = new AtomicReference<Applications>();
    private volatile boolean readyForServingData;

    /**
     * Builds the HTTP client for the remote region, performs an initial
     * registry fetch, and schedules the periodic background refresh task.
     *
     * @param regionName      name of the remote region (used for client/thread names)
     * @param remoteRegionURL base URL of the remote region's eureka server
     */
    public RemoteRegionRegistry(String regionName, URL remoteRegionURL) {
        this.remoteRegionURL = remoteRegionURL;
        this.fetchRegistryTimer = Monitors.newTimer(this.remoteRegionURL.toString() + "_FetchRegistry");
        EurekaJerseyClientBuilder clientBuilder = new EurekaJerseyClientBuilder()
                .withUserAgent("Java-EurekaClient-RemoteRegion")
                .withConnectionTimeout(EUREKA_SERVER_CONFIG.getRemoteRegionConnectTimeoutMs())
                .withReadTimeout(EUREKA_SERVER_CONFIG.getRemoteRegionReadTimeoutMs())
                .withMaxConnectionsPerHost(EUREKA_SERVER_CONFIG.getRemoteRegionTotalConnectionsPerHost())
                .withMaxTotalConnections(EUREKA_SERVER_CONFIG.getRemoteRegionTotalConnections())
                .withConnectionIdleTimeout(EUREKA_SERVER_CONFIG.getRemoteRegionConnectionIdleTimeoutSeconds());

        if (remoteRegionURL.getProtocol().equals("http")) {
            clientBuilder.withClientName("Discovery-RemoteRegionClient-" + regionName);
        } else if ("true".equals(System.getProperty("com.netflix.eureka.shouldSSLConnectionsUseSystemSocketFactory"))) {
            clientBuilder.withClientName("Discovery-RemoteRegionSystemSecureClient-" + regionName)
                    .withSystemSSLConfiguration();
        } else {
            clientBuilder.withClientName("Discovery-RemoteRegionSecureClient-" + regionName)
                    .withTrustStoreFile(
                            EUREKA_SERVER_CONFIG.getRemoteRegionTrustStore(),
                            EUREKA_SERVER_CONFIG.getRemoteRegionTrustStorePassword()
                    );
        }
        discoveryJerseyClient = clientBuilder.build();
        discoveryApacheClient = discoveryJerseyClient.getClient();

        // should we enable GZip decoding of responses based on Response
        // Headers?
        if (EUREKA_SERVER_CONFIG.shouldGZipContentFromRemoteRegion()) {
            // compressed only if there exists a 'Content-Encoding' header
            // whose value is "gzip"
            discoveryApacheClient.addFilter(new GZIPContentEncodingFilter(false));
        }

        String ip = null;
        try {
            ip = InetAddress.getLocalHost().getHostAddress();
        } catch (UnknownHostException e) {
            logger.warn("Cannot find localhost ip", e);
        }
        EurekaServerIdentity identity = new EurekaServerIdentity(ip);
        discoveryApacheClient.addFilter(new EurekaIdentityHeaderFilter(identity));

        applications.set(new Applications());
        try {
            if (fetchRegistry()) {
                this.readyForServingData = true;
            } else {
                logger.warn("Failed to fetch remote registry. This means this eureka server is not ready for serving traffic.");
            }
        } catch (Throwable e) {
            logger.error("Problem fetching registry information :", e);
        }

        // remote region fetch
        Runnable remoteRegionFetchTask = new Runnable() {
            @Override
            public void run() {
                try {
                    if (fetchRegistry()) {
                        readyForServingData = true;
                    } else {
                        logger.warn("Failed to fetch remote registry. This means this eureka server is not ready for serving traffic.");
                    }
                } catch (Throwable e) {
                    logger.error("Error getting from remote registry :", e);
                }
            }
        };

        ThreadPoolExecutor remoteRegionFetchExecutor = new ThreadPoolExecutor(
                1, EUREKA_SERVER_CONFIG.getRemoteRegionFetchThreadPoolSize(), 0, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());  // use direct handoff

        scheduler = Executors.newScheduledThreadPool(1,
                new ThreadFactoryBuilder()
                        .setNameFormat("Eureka-RemoteRegionCacheRefresher_" + regionName + "-%d")
                        .setDaemon(true)
                        .build());

        scheduler.schedule(
                new TimedSupervisorTask(
                        "RemoteRegionFetch_" + regionName,
                        scheduler,
                        remoteRegionFetchExecutor,
                        EUREKA_SERVER_CONFIG.getRemoteRegionRegistryFetchInterval(),
                        TimeUnit.SECONDS,
                        5,  // exponential backoff bound
                        remoteRegionFetchTask
                ),
                EUREKA_SERVER_CONFIG.getRemoteRegionRegistryFetchInterval(), TimeUnit.SECONDS);
    }

    /**
     * Check if this registry is ready for serving data.
     * @return true if ready, false otherwise.
     */
    public boolean isReadyForServingData() {
        return readyForServingData;
    }

    /**
     * Fetch the registry information from the remote region.
     * @return true, if the fetch was successful, false otherwise.
     */
    private boolean fetchRegistry() {
        ClientResponse response = null;
        Stopwatch tracer = fetchRegistryTimer.start();

        try {
            // If the delta is disabled or if it is the first time, get all
            // applications
            if (EUREKA_SERVER_CONFIG.shouldDisableDeltaForRemoteRegions()
                    || (getApplications() == null)
                    || (getApplications().getRegisteredApplications().size() == 0)) {
                logger.info("Disable delta property : {}", EUREKA_SERVER_CONFIG.shouldDisableDeltaForRemoteRegions());
                logger.info("Application is null : {}", (getApplications() == null));
                logger.info("Registered Applications size is zero : {}",
                        (getApplications().getRegisteredApplications().size() == 0));
                response = storeFullRegistry();
            } else {
                long currDeltaGeneration = deltaGeneration.get();
                Applications delta = null;
                response = fetchRemoteRegistry(true);

                if (null != response) {
                    if (response.getStatus() == Status.OK.getStatusCode()) {
                        delta = response.getEntity(Applications.class);
                        if (delta == null) {
                            logger.error("The delta is null for some reason. Not storing this information");
                        } else if (deltaGeneration.compareAndSet(currDeltaGeneration, currDeltaGeneration + 1)) {
                            this.applicationsDelta.set(delta);
                        } else {
                            delta = null;  // set the delta to null so we don't use it
                            logger.warn("Not updating delta as another thread is updating it already");
                        }
                    }

                    if (delta == null) {
                        logger.warn("The server does not allow the delta revision to be applied because it is not safe. Hence got the full registry.");
                        this.closeResponse(response);
                        // NOTE(review): this retries the *delta* endpoint even though
                        // the log message talks about the full registry — confirm
                        // whether fetchRemoteRegistry(false) was intended here.
                        response = fetchRemoteRegistry(true);
                    } else {
                        updateDelta(delta);
                        String reconcileHashCode = getApplications().getReconcileHashCode();
                        // There is a diff in number of instances for some reason
                        if ((!reconcileHashCode.equals(delta.getAppsHashCode()))) {
                            response = reconcileAndLogDifference(response, delta, reconcileHashCode);
                        }
                    }
                }
            }
            logTotalInstances();

            logger.debug("Remote Registry Fetch Status : {}", null == response ? null : response.getStatus());
        } catch (Throwable e) {
            logger.error("Unable to fetch registry information from the remote registry "
                    + this.remoteRegionURL.toString(), e);
            return false;
        } finally {
            if (tracer != null) {
                tracer.stop();
            }
            closeResponse(response);
        }
        return null != response;
    }

    /**
     * Updates the delta information fetches from the eureka server into the
     * local cache.
     *
     * @param delta
     *            the delta information received from eureka server in the last
     *            poll cycle.
     */
    private void updateDelta(Applications delta) {
        int deltaCount = 0;
        for (Application app : delta.getRegisteredApplications()) {
            for (InstanceInfo instance : app.getInstances()) {
                ++deltaCount;
                if (ActionType.ADDED.equals(instance.getActionType())) {
                    Application existingApp = getApplications()
                            .getRegisteredApplications(instance.getAppName());
                    if (existingApp == null) {
                        getApplications().addApplication(app);
                    }
                    logger.debug("Added instance {} to the existing apps ", instance.getId());
                    getApplications().getRegisteredApplications(instance.getAppName()).addInstance(instance);
                } else if (ActionType.MODIFIED.equals(instance.getActionType())) {
                    Application existingApp = getApplications()
                            .getRegisteredApplications(instance.getAppName());
                    if (existingApp == null) {
                        getApplications().addApplication(app);
                    }
                    logger.debug("Modified instance {} to the existing apps ", instance.getId());
                    getApplications().getRegisteredApplications(instance.getAppName()).addInstance(instance);
                } else if (ActionType.DELETED.equals(instance.getActionType())) {
                    Application existingApp = getApplications()
                            .getRegisteredApplications(instance.getAppName());
                    if (existingApp == null) {
                        getApplications().addApplication(app);
                    }
                    logger.debug("Deleted instance {} to the existing apps ", instance.getId());
                    getApplications().getRegisteredApplications(instance.getAppName()).removeInstance(instance);
                }
            }
        }
        logger.debug("The total number of instances fetched by the delta processor : {}", deltaCount);
    }

    /**
     * Close HTTP response object and its respective resources.
     *
     * @param response
     *            the HttpResponse object.
     */
    private void closeResponse(ClientResponse response) {
        if (response != null) {
            try {
                response.close();
            } catch (Throwable th) {
                logger.error("Cannot release response resource :", th);
            }
        }
    }

    /**
     * Gets the full registry information from the eureka server and stores it
     * locally.
     *
     * @return the full registry information.
     */
    public ClientResponse storeFullRegistry() {
        long currentUpdateGeneration = fullRegistryGeneration.get();
        ClientResponse response = fetchRemoteRegistry(false);
        if (response == null) {
            logger.error("The response is null.");
            return null;
        }
        Applications apps = response.getEntity(Applications.class);
        if (apps == null) {
            logger.error("The application is null for some reason. Not storing this information");
        } else if (fullRegistryGeneration.compareAndSet(currentUpdateGeneration, currentUpdateGeneration + 1)) {
            applications.set(apps);
        } else {
            logger.warn("Not updating applications as another thread is updating it already");
        }
        logger.info("The response status is {}", response.getStatus());
        return response;
    }

    /**
     * Fetch registry information from the remote region.
     * @param delta - true, if the fetch needs to get deltas, false otherwise
     * @return - response which has information about the data.
     */
    private ClientResponse fetchRemoteRegistry(boolean delta) {
        logger.info("Getting instance registry info from the eureka server : {} , delta : {}",
                this.remoteRegionURL, delta);
        ClientResponse response = null;
        try {
            String urlPath = delta ? "apps/delta" : "apps/";

            response = discoveryApacheClient
                    .resource(this.remoteRegionURL.toString() + urlPath)
                    .accept(MediaType.APPLICATION_JSON_TYPE)
                    .get(ClientResponse.class);
            int httpStatus = response.getStatus();
            if (httpStatus >= 200 && httpStatus < 300) {
                logger.debug("Got the data successfully : {}", httpStatus);
            } else {
                logger.warn("Cannot get the data from {} : {}", this.remoteRegionURL.toString(), httpStatus);
                return null;  // To short circuit entity evaluation.
            }
        } catch (Throwable t) {
            logger.error("Can't get a response from " + this.remoteRegionURL, t);
        }
        return response;
    }

    /**
     * Reconciles the delta information fetched to see if the hashcodes match.
     *
     * @param response - the response of the delta fetch.
     * @param delta - the delta information fetched previously for reconcililation.
     * @param reconcileHashCode - the hashcode for comparison.
     * @return - response
     * @throws Throwable
     */
    private ClientResponse reconcileAndLogDifference(ClientResponse response, Applications delta,
                                                     String reconcileHashCode) throws Throwable {
        logger.warn("The Reconcile hashcodes do not match, client : {}, server : {}. Getting the full registry",
                reconcileHashCode, delta.getAppsHashCode());

        this.closeResponse(response);
        response = this.fetchRemoteRegistry(false);
        if (null == response) {
            logger.warn("Response is null while fetching remote registry during reconcile difference.");
            return null;
        }
        Applications serverApps = response.getEntity(Applications.class);
        Map<String, List<String>> reconcileDiffMap = getApplications().getReconcileMapDiff(serverApps);
        String reconcileString = "";
        for (Map.Entry<String, List<String>> mapEntry : reconcileDiffMap.entrySet()) {
            reconcileString = reconcileString + mapEntry.getKey() + ": ";
            for (String displayString : mapEntry.getValue()) {
                reconcileString = reconcileString + displayString;
            }
            reconcileString = reconcileString + "\n";
        }
        logger.warn("The reconcile string is {}", reconcileString);

        applications.set(serverApps);
        applicationsDelta.set(serverApps);
        logger.warn("The Reconcile hashcodes after complete sync up, client : {}, server : {}.",
                getApplications().getReconcileHashCode(), delta.getAppsHashCode());
        return response;
    }

    /**
     * Logs the total number of non-filtered instances stored locally.
     */
    private void logTotalInstances() {
        int totInstances = 0;
        for (Application application : getApplications().getRegisteredApplications()) {
            totInstances += application.getInstancesAsIsFromEureka().size();
        }
        logger.debug("The total number of all instances in the client now is {}", totInstances);
    }

    @Override
    public Applications getApplications() {
        return applications.get();
    }

    @Override
    public InstanceInfo getNextServerFromEureka(String arg0, boolean arg1) {
        return null;
    }

    @Override
    public Application getApplication(String appName) {
        return this.applications.get().getRegisteredApplications(appName);
    }

    @Override
    public List<InstanceInfo> getInstancesById(String id) {
        // BUG FIX: the previous implementation initialized the result with
        // Collections.emptyList() and then called add() on it — emptyList()
        // is immutable, so any successful lookup threw
        // UnsupportedOperationException. Return a singleton list instead.
        for (Application app : applications.get().getRegisteredApplications()) {
            InstanceInfo info = app.getByInstanceId(id);
            if (info != null) {
                return Collections.singletonList(info);
            }
        }
        return Collections.emptyList();
    }

    public Applications getApplicationDeltas() {
        return this.applicationsDelta.get();
    }
}
/*
 * Copyright 2018 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.persistence.internal;

import com.google.common.collect.Lists;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.nio.file.ShrinkWrapFileSystems;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.mockito.Matchers;
import org.terasology.TerasologyTestingEnvironment;
import org.terasology.assets.ResourceUrn;
import org.terasology.assets.management.AssetManager;
import org.terasology.engine.bootstrap.EntitySystemSetupUtil;
import org.terasology.engine.module.ModuleManager;
import org.terasology.engine.paths.PathManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.internal.EngineEntityManager;
import org.terasology.entitySystem.stubs.EntityRefComponent;
import org.terasology.entitySystem.stubs.StringComponent;
import org.terasology.logic.location.LocationComponent;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.module.ModuleEnvironment;
import org.terasology.network.Client;
import org.terasology.network.ClientComponent;
import org.terasology.network.NetworkMode;
import org.terasology.network.NetworkSystem;
import org.terasology.persistence.ChunkStore;
import org.terasology.persistence.PlayerStore;
import org.terasology.persistence.StorageManager;
import org.terasology.recording.CharacterStateEventPositionMap;
import org.terasology.recording.DirectionAndOriginPosRecorderList;
import org.terasology.recording.RecordAndReplayCurrentStatus;
import org.terasology.recording.RecordAndReplaySerializer;
import org.terasology.recording.RecordAndReplayUtils;
import org.terasology.recording.RecordedEventStore;
import org.terasology.reflection.TypeRegistry;
import org.terasology.registry.CoreRegistry;
import org.terasology.world.WorldProvider;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.family.SymmetricFamily;
import org.terasology.world.block.loader.BlockFamilyDefinition;
import org.terasology.world.block.loader.BlockFamilyDefinitionData;
import org.terasology.world.chunks.Chunk;
import org.terasology.world.chunks.ChunkProvider;
import org.terasology.world.chunks.blockdata.ExtraBlockDataManager;
import org.terasology.world.chunks.internal.ChunkImpl;
import org.terasology.world.internal.WorldInfo;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Integration tests for {@link ReadWriteStorageManager}: verifies that players, global
 * entities, entity references and chunks survive a save/shutdown/reload cycle.
 *
 * <p>Each test drives the save pipeline via
 * {@code waitForCompletionOfPreviousSaveAndStartSaving()} followed by
 * {@code finishSavingAndShutdown()}, then (where needed) constructs a fresh
 * {@code ReadWriteStorageManager} over the same save path to assert the data round-trips.</p>
 */
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class StorageManagerTest extends TerasologyTestingEnvironment {
    public static final String PLAYER_ID = "someId";
    public static final Vector3i CHUNK_POS = new Vector3i(1, 2, 3);

    // Shared home directory for all tests in this class; created once in createFolder().
    private static File temporaryFolder;

    private ModuleEnvironment moduleEnvironment;
    private ReadWriteStorageManager esm;
    private EngineEntityManager entityManager;
    private BlockManager blockManager;
    private ExtraBlockDataManager extraDataManager;
    private Block testBlock;
    private Block testBlock2;
    private EntityRef character;
    private Path savePath;
    private RecordAndReplaySerializer recordAndReplaySerializer;
    private RecordAndReplayUtils recordAndReplayUtils;
    private RecordAndReplayCurrentStatus recordAndReplayCurrentStatus;

    /**
     * Creates the temporary home directory used as the engine's override home path.
     *
     * <p>FIX: the original used {@code File.createTempFile("junit", "", null)} followed by
     * {@code delete()} and {@code mkdir()} with both boolean return values ignored — a
     * race-prone idiom that can fail silently. {@link Files#createTempDirectory} creates
     * the directory atomically and throws on failure instead.</p>
     *
     * @throws IOException if the temporary directory cannot be created
     */
    @BeforeAll
    static void createFolder() throws IOException {
        temporaryFolder = Files.createTempDirectory("junit").toFile();
    }

    /**
     * Builds a fresh engine context per test: an in-memory (ShrinkWrap) file system sanity
     * check, a save path under the temporary home, a {@link ReadWriteStorageManager}, a
     * single local client with a character entity, and two registered test blocks.
     */
    @BeforeEach
    public void setup() throws Exception {
        super.setup();
        JavaArchive homeArchive = ShrinkWrap.create(JavaArchive.class);
        FileSystem vfs = ShrinkWrapFileSystems.newFileSystem(homeArchive);
        PathManager.getInstance().useOverrideHomePath(temporaryFolder.toPath());
        savePath = PathManager.getInstance().getSavePath("testSave");

        // Sanity check: the virtual FS starts empty (no stale global store).
        assert !Files.isRegularFile(vfs.getPath("global.dat"));

        entityManager = context.get(EngineEntityManager.class);
        moduleEnvironment = mock(ModuleEnvironment.class);
        blockManager = context.get(BlockManager.class);
        extraDataManager = context.get(ExtraBlockDataManager.class);

        ModuleManager moduleManager = mock(ModuleManager.class);
        when(moduleManager.getEnvironment()).thenReturn(moduleEnvironment);

        RecordedEventStore recordedEventStore = new RecordedEventStore();
        recordAndReplayUtils = new RecordAndReplayUtils();
        CharacterStateEventPositionMap characterStateEventPositionMap = new CharacterStateEventPositionMap();
        DirectionAndOriginPosRecorderList directionAndOriginPosRecorderList = new DirectionAndOriginPosRecorderList();
        recordAndReplaySerializer = new RecordAndReplaySerializer(entityManager, recordedEventStore,
                recordAndReplayUtils, characterStateEventPositionMap, directionAndOriginPosRecorderList,
                moduleManager, mock(TypeRegistry.class));
        recordAndReplayCurrentStatus = context.get(RecordAndReplayCurrentStatus.class);

        esm = new ReadWriteStorageManager(savePath, moduleEnvironment, entityManager, blockManager,
                extraDataManager, false, recordAndReplaySerializer, recordAndReplayUtils,
                recordAndReplayCurrentStatus);
        context.put(StorageManager.class, esm);

        this.character = entityManager.create();
        Client client = createClientMock(PLAYER_ID, character);
        NetworkSystem networkSystem = mock(NetworkSystem.class);
        when(networkSystem.getMode()).thenReturn(NetworkMode.NONE);
        when(networkSystem.getPlayers()).thenReturn(Arrays.asList(client));
        context.put(NetworkSystem.class, networkSystem);

        AssetManager assetManager = context.get(AssetManager.class);
        BlockFamilyDefinitionData data = new BlockFamilyDefinitionData();
        data.setBlockFamily(SymmetricFamily.class);
        assetManager.loadAsset(new ResourceUrn("test:testblock"), data, BlockFamilyDefinition.class);
        assetManager.loadAsset(new ResourceUrn("test:testblock2"), data, BlockFamilyDefinition.class);
        testBlock = context.get(BlockManager.class).getBlock("test:testblock");
        testBlock2 = context.get(BlockManager.class).getBlock("test:testblock2");

        context.put(ChunkProvider.class, mock(ChunkProvider.class));
        WorldProvider worldProvider = mock(WorldProvider.class);
        when(worldProvider.getWorldInfo()).thenReturn(new WorldInfo());
        context.put(WorldProvider.class, worldProvider);
    }

    /**
     * Creates a mock {@link Client} with the given id whose entity wraps the given character.
     */
    private Client createClientMock(String clientId, EntityRef charac) {
        EntityRef clientEntity = createClientEntity(charac);
        Client client = mock(Client.class);
        when(client.getEntity()).thenReturn(clientEntity);
        when(client.getId()).thenReturn(clientId);
        return client;
    }

    /**
     * Creates a local client entity bound to the given character entity.
     */
    private EntityRef createClientEntity(EntityRef charac) {
        ClientComponent clientComponent = new ClientComponent();
        clientComponent.local = true;
        clientComponent.character = charac;
        EntityRef clientEntity = entityManager.create(clientComponent);
        return clientEntity;
    }

    @Test
    @Order(1)
    public void testGetUnstoredPlayerReturnsNewStor() {
        // Loading a player that was never saved yields a fresh, empty store.
        PlayerStore store = esm.loadPlayerStore(PLAYER_ID);
        assertNotNull(store);
        assertEquals(new Vector3f(), store.getRelevanceLocation());
        assertFalse(store.hasCharacter());
        assertEquals(PLAYER_ID, store.getId());
    }

    @Test
    public void testStoreAndRestoreOfPlayerWithoutCharacter() {
        // remove character from player:
        character.destroy();

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        PlayerStore restoredStore = esm.loadPlayerStore(PLAYER_ID);
        assertNotNull(restoredStore);
        assertFalse(restoredStore.hasCharacter());
        assertEquals(new Vector3f(), restoredStore.getRelevanceLocation());
    }

    @Test
    public void testPlayerRelevanceLocationSurvivesStorage() {
        Vector3f loc = new Vector3f(1, 2, 3);
        character.addComponent(new LocationComponent(loc));

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        PlayerStore restored = esm.loadPlayerStore(PLAYER_ID);
        assertEquals(loc, restored.getRelevanceLocation());
    }

    @Test
    public void testCharacterSurvivesStorage() {
        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        PlayerStore restored = esm.loadPlayerStore(PLAYER_ID);
        restored.restoreEntities();
        assertTrue(restored.hasCharacter());
        assertEquals(character, restored.getCharacter());
    }

    @Test
    public void testGlobalEntitiesStoredAndRestored() throws Exception {
        EntityRef entity = entityManager.create(new StringComponent("Test"));
        long entityId = entity.getId();

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        // Rebuild the entity system and load the global store into a new manager.
        EntitySystemSetupUtil.addReflectionBasedLibraries(context);
        EntitySystemSetupUtil.addEntityManagementRelatedClasses(context);
        EngineEntityManager newEntityManager = context.get(EngineEntityManager.class);
        StorageManager newSM = new ReadWriteStorageManager(savePath, moduleEnvironment, newEntityManager,
                blockManager, extraDataManager, false, recordAndReplaySerializer, recordAndReplayUtils,
                recordAndReplayCurrentStatus);
        newSM.loadGlobalStore();

        List<EntityRef> entities = Lists.newArrayList(newEntityManager.getEntitiesWith(StringComponent.class));
        assertEquals(1, entities.size());
        // Entity ids are expected to be stable across save/restore.
        assertEquals(entityId, entities.get(0).getId());
    }

    @Test
    public void testReferenceRemainsValidOverStorageRestoral() throws Exception {
        EntityRef someEntity = entityManager.create();
        character.addComponent(new EntityRefComponent(someEntity));

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        EntitySystemSetupUtil.addReflectionBasedLibraries(context);
        EntitySystemSetupUtil.addEntityManagementRelatedClasses(context);
        EngineEntityManager newEntityManager = context.get(EngineEntityManager.class);
        StorageManager newSM = new ReadWriteStorageManager(savePath, moduleEnvironment, newEntityManager,
                blockManager, extraDataManager, false, recordAndReplaySerializer, recordAndReplayUtils,
                recordAndReplayCurrentStatus);
        newSM.loadGlobalStore();

        PlayerStore restored = newSM.loadPlayerStore(PLAYER_ID);
        restored.restoreEntities();
        assertTrue(restored.getCharacter().getComponent(EntityRefComponent.class).entityRef.exists());
    }

    @Test
    public void testGetUnstoredChunkReturnsNothing() {
        esm.loadChunkStore(CHUNK_POS);
    }

    @Test
    public void testStoreAndRestoreChunkStore() {
        Chunk chunk = new ChunkImpl(CHUNK_POS, blockManager, extraDataManager);
        chunk.setBlock(0, 0, 0, testBlock);
        chunk.markReady();
        ChunkProvider chunkProvider = mock(ChunkProvider.class);
        when(chunkProvider.getAllChunks()).thenReturn(Arrays.asList(chunk));
        CoreRegistry.put(ChunkProvider.class, chunkProvider);

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        ChunkStore restored = esm.loadChunkStore(CHUNK_POS);
        assertNotNull(restored);
        assertEquals(CHUNK_POS, restored.getChunkPosition());
        assertNotNull(restored.getChunk());
        assertEquals(testBlock, restored.getChunk().getBlock(0, 0, 0));
    }

    @Test
    public void testChunkSurvivesStorageSaveAndRestore() throws Exception {
        Chunk chunk = new ChunkImpl(CHUNK_POS, blockManager, extraDataManager);
        chunk.setBlock(0, 0, 0, testBlock);
        chunk.setBlock(0, 4, 2, testBlock2);
        chunk.markReady();
        ChunkProvider chunkProvider = mock(ChunkProvider.class);
        when(chunkProvider.getAllChunks()).thenReturn(Arrays.asList(chunk));
        when(chunkProvider.getChunk(Matchers.any(Vector3i.class))).thenReturn(chunk);
        CoreRegistry.put(ChunkProvider.class, chunkProvider);

        // Exercise the zip-compressed chunk-storage path end to end.
        boolean storeChunkInZips = true;
        esm.setStoreChunksInZips(storeChunkInZips);
        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        EntitySystemSetupUtil.addReflectionBasedLibraries(context);
        EntitySystemSetupUtil.addEntityManagementRelatedClasses(context);
        EngineEntityManager newEntityManager = context.get(EngineEntityManager.class);
        StorageManager newSM = new ReadWriteStorageManager(savePath, moduleEnvironment, newEntityManager,
                blockManager, extraDataManager, storeChunkInZips, recordAndReplaySerializer,
                recordAndReplayUtils, recordAndReplayCurrentStatus);
        newSM.loadGlobalStore();

        ChunkStore restored = newSM.loadChunkStore(CHUNK_POS);
        assertNotNull(restored);
        assertEquals(CHUNK_POS, restored.getChunkPosition());
        assertNotNull(restored.getChunk());
        assertEquals(testBlock, restored.getChunk().getBlock(0, 0, 0));
        assertEquals(testBlock2, restored.getChunk().getBlock(0, 4, 2));
    }

    @Test
    public void testEntitySurvivesStorageInChunkStore() throws Exception {
        Chunk chunk = new ChunkImpl(CHUNK_POS, blockManager, extraDataManager);
        chunk.setBlock(0, 0, 0, testBlock);
        chunk.markReady();
        ChunkProvider chunkProvider = mock(ChunkProvider.class);
        when(chunkProvider.getAllChunks()).thenReturn(Arrays.asList(chunk));
        CoreRegistry.put(ChunkProvider.class, chunkProvider);
        EntityRef entity = entityManager.create();
        long id = entity.getId();

        // Position the entity just inside the chunk so it is stored with the chunk.
        LocationComponent locationComponent = new LocationComponent();
        Vector3f positionInChunk = new Vector3f(chunk.getAABB().getMin());
        positionInChunk.x += 1;
        positionInChunk.y += 1;
        positionInChunk.z += 1;
        locationComponent.setWorldPosition(positionInChunk);
        entity.addComponent(locationComponent);

        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        EntitySystemSetupUtil.addReflectionBasedLibraries(context);
        EntitySystemSetupUtil.addEntityManagementRelatedClasses(context);
        EngineEntityManager newEntityManager = context.get(EngineEntityManager.class);
        StorageManager newSM = new ReadWriteStorageManager(savePath, moduleEnvironment, newEntityManager,
                blockManager, extraDataManager, false, recordAndReplaySerializer, recordAndReplayUtils,
                recordAndReplayCurrentStatus);
        newSM.loadGlobalStore();

        ChunkStore restored = newSM.loadChunkStore(CHUNK_POS);
        restored.restoreEntities();
        EntityRef ref = newEntityManager.getEntity(id);
        assertTrue(ref.exists());
        assertTrue(ref.isActive());
    }

    @Test
    public void testCanSavePlayerWithoutUnloading() throws Exception {
        esm.waitForCompletionOfPreviousSaveAndStartSaving();
        esm.finishSavingAndShutdown();

        // Saving must not deactivate live entities.
        assertTrue(character.isActive());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.falcon.oozie.process;

import org.apache.commons.lang3.StringUtils;
import org.apache.falcon.FalconException;
import org.apache.falcon.LifeCycle;
import org.apache.falcon.Tag;
import org.apache.falcon.entity.CatalogStorage;
import org.apache.falcon.entity.EntityUtil;
import org.apache.falcon.entity.FeedHelper;
import org.apache.falcon.entity.ProcessHelper;
import org.apache.falcon.entity.Storage;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.cluster.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.LocationType;
import org.apache.falcon.entity.v0.process.Input;
import org.apache.falcon.entity.v0.process.Output;
import org.apache.falcon.entity.v0.process.Process;
import org.apache.falcon.expression.ExpressionHelper;
import org.apache.falcon.oozie.OozieCoordinatorBuilder;
import org.apache.falcon.oozie.OozieEntityBuilder;
import org.apache.falcon.oozie.OozieOrchestrationWorkflowBuilder;
import org.apache.falcon.oozie.coordinator.CONTROLS;
import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
import org.apache.falcon.oozie.coordinator.DATAIN;
import org.apache.falcon.oozie.coordinator.DATAOUT;
import org.apache.falcon.oozie.coordinator.DATASETS;
import org.apache.falcon.oozie.coordinator.INPUTEVENTS;
import org.apache.falcon.oozie.coordinator.OUTPUTEVENTS;
import org.apache.falcon.oozie.coordinator.SYNCDATASET;
import org.apache.falcon.oozie.coordinator.WORKFLOW;
import org.apache.falcon.workflow.WorkflowExecutionArgs;
import org.apache.hadoop.fs.Path;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

/**
 * Builds oozie coordinator for process.
 *
 * <p>Translates a Falcon {@link Process} entity into an Oozie {@code COORDINATORAPP}:
 * attributes (name/validity/frequency), controls (concurrency/timeout/throttle),
 * input/output datasets and events, and the parent workflow action. Input/output
 * metadata is also propagated into the coordinator's configuration {@link Properties}
 * for consumption by the workflow and the late-data handler.</p>
 */
public class ProcessExecutionCoordinatorBuilder extends OozieCoordinatorBuilder<Process> {
    // Lower bound (in millis) for the defaulted coordinator timeout.
    private static final int THIRTY_MINUTES = 30 * 60 * 1000;

    public ProcessExecutionCoordinatorBuilder(Process entity) {
        super(entity, LifeCycle.EXECUTION);
    }

    /**
     * Builds the execution coordinator for the given cluster, marshals it under
     * {@code buildPath}, and returns the properties bundle pointing at the result.
     *
     * @param cluster   target cluster the coordinator runs against
     * @param buildPath base path under which the coordinator app is written
     * @return a single-element list of properties for the marshalled coordinator
     * @throws FalconException on entity lookup, storage or build failures
     */
    @Override
    public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
        String coordName = getEntityName();
        Path coordPath = getBuildPath(buildPath);
        copySharedLibs(cluster, new Path(coordPath, "lib"));

        COORDINATORAPP coord = new COORDINATORAPP();
        // coord attributes
        initializeCoordAttributes(cluster, coord, coordName);

        CONTROLS controls = initializeControls(); // controls
        coord.setControls(controls);

        // Configuration
        Properties props = createCoordDefaultConfiguration(coordName);

        initializeInputPaths(cluster, coord, props); // inputs
        initializeOutputPaths(cluster, coord, props);  // outputs

        // create parent wf
        Properties wfProps = OozieOrchestrationWorkflowBuilder.get(entity, cluster, Tag.DEFAULT).build(cluster,
            coordPath);
        WORKFLOW wf = new WORKFLOW();
        wf.setAppPath(getStoragePath(wfProps.getProperty(OozieEntityBuilder.ENTITY_PATH)));
        // Add the custom properties set in feed. Else, dryrun won't catch any missing props.
        props.putAll(EntityUtil.getEntityProperties(entity));
        wf.setConfiguration(getConfig(props));

        // set coord action to parent wf
        org.apache.falcon.oozie.coordinator.ACTION action = new org.apache.falcon.oozie.coordinator.ACTION();
        action.setWorkflow(wf);
        coord.setAction(action);

        Path marshalPath = marshal(cluster, coord, coordPath);
        return Arrays.asList(getProperties(marshalPath, coordName));
    }

    /**
     * Sets coordinator name, validity window (from the process's cluster definition),
     * timezone, and frequency (as an Oozie EL expression).
     */
    private void initializeCoordAttributes(Cluster cluster, COORDINATORAPP coord, String coordName) {
        coord.setName(coordName);
        org.apache.falcon.entity.v0.process.Cluster processCluster =
            ProcessHelper.getCluster(entity, cluster.getName());
        coord.setStart(SchemaHelper.formatDateUTC(processCluster.getValidity().getStart()));
        coord.setEnd(SchemaHelper.formatDateUTC(processCluster.getValidity().getEnd()));
        coord.setTimezone(entity.getTimezone().getID());
        coord.setFrequency("${coord:" + entity.getFrequency().toString() + "}");
    }

    /**
     * Builds the coordinator CONTROLS block: concurrency from the process's parallelism,
     * execution order, timeout (explicit, or defaulted to 6x frequency with a
     * 30-minute floor), and a throttle derived from timeout/frequency.
     */
    private CONTROLS initializeControls() throws FalconException {
        CONTROLS controls = new CONTROLS();
        controls.setConcurrency(String.valueOf(entity.getParallel()));
        controls.setExecution(entity.getOrder().name());

        Frequency timeout = entity.getTimeout();
        long frequencyInMillis = ExpressionHelper.get().evaluate(entity.getFrequency().toString(), Long.class);
        long timeoutInMillis;
        if (timeout != null) {
            timeoutInMillis = ExpressionHelper.get().evaluate(entity.getTimeout().toString(), Long.class);
        } else {
            // Default: six times the frequency, but never less than thirty minutes.
            timeoutInMillis = frequencyInMillis * 6;
            if (timeoutInMillis < THIRTY_MINUTES) {
                timeoutInMillis = THIRTY_MINUTES;
            }
        }
        // Oozie timeout is expressed in minutes.
        controls.setTimeout(String.valueOf(timeoutInMillis / (1000 * 60)));

        // NOTE(review): `x / y * 2 > 0` is equivalent to `x / y > 0` (integer division),
        // i.e. throttle is only set when timeout >= frequency — presumably intentional,
        // but worth confirming the precedence matches the original author's intent.
        if (timeoutInMillis / frequencyInMillis * 2 > 0) {
            controls.setThrottle(String.valueOf(timeoutInMillis / frequencyInMillis * 2));
        }
        return controls;
    }

    /**
     * Registers each process input as a coordinator dataset (plus a data-in event for
     * non-optional inputs) and records per-input EL expressions and late-data metadata
     * into {@code props}.
     *
     * <p>NOTE(review): if any input's storage has no DATA uri template,
     * {@code createDataSet} returns null and this method returns early — remaining
     * inputs and the late-data properties are then skipped. Verify this is intended.</p>
     */
    private void initializeInputPaths(Cluster cluster, COORDINATORAPP coord,
        Properties props) throws FalconException {
        if (entity.getInputs() == null) {
            // No inputs: publish explicit NONE markers so downstream consumers see them.
            props.put(WorkflowExecutionArgs.INPUT_FEED_NAMES.getName(), NONE);
            props.put(WorkflowExecutionArgs.INPUT_FEED_PATHS.getName(), NONE);
            props.put(WorkflowExecutionArgs.INPUT_NAMES.getName(), NONE);
            return;
        }

        List<String> inputFeeds = new LinkedList<>();
        List<String> inputNames = new LinkedList<>();
        List<String> inputPaths = new LinkedList<>();
        List<String> inputFeedStorageTypes = new LinkedList<>();
        for (Input input : entity.getInputs().getInputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, input.getFeed());
            Storage storage = FeedHelper.createStorage(cluster, feed);

            if (coord.getDatasets() == null) {
                coord.setDatasets(new DATASETS());
            }
            SYNCDATASET syncdataset = createDataSet(feed, cluster, storage, input.getName(), LocationType.DATA);
            if (syncdataset == null) {
                return;
            }
            coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);

            // Optional inputs do not gate coordinator materialization.
            if (!input.isOptional()) {
                if (coord.getInputEvents() == null) {
                    coord.setInputEvents(new INPUTEVENTS());
                }
                DATAIN datain = createDataIn(input);
                coord.getInputEvents().getDataIn().add(datain);
            }

            String inputExpr = null;
            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
                inputExpr = getELExpression("dataIn('" + input.getName() + "', '" + input.getPartition() + "')");
                props.put(input.getName(), inputExpr);
            } else if (storage.getType() == Storage.TYPE.TABLE) {
                inputExpr = "${coord:dataIn('" + input.getName() + "')}";
                propagateCatalogTableProperties(input, (CatalogStorage) storage, props);
            }

            inputFeeds.add(feed.getName());
            inputPaths.add(inputExpr);
            inputNames.add(input.getName());
            inputFeedStorageTypes.add(storage.getType().name());
        }

        propagateLateDataProperties(inputFeeds, inputNames, inputPaths, inputFeedStorageTypes, props);
    }

    /**
     * Publishes '#'-joined input feed names, input names, paths and storage types into
     * {@code props} for the late-data handler.
     */
    private void propagateLateDataProperties(List<String> inputFeeds, List<String> inputNames,
        List<String> inputPaths, List<String> inputFeedStorageTypes, Properties props) {
        // populate late data handler - should-record action
        props.put(WorkflowExecutionArgs.INPUT_FEED_NAMES.getName(), StringUtils.join(inputFeeds, '#'));
        props.put(WorkflowExecutionArgs.INPUT_NAMES.getName(), StringUtils.join(inputNames, '#'));
        props.put(WorkflowExecutionArgs.INPUT_FEED_PATHS.getName(), StringUtils.join(inputPaths, '#'));

        // storage type for each corresponding feed sent as a param to LateDataHandler
        // needed to compute usage based on storage type in LateDataHandler
        props.put(WorkflowExecutionArgs.INPUT_STORAGE_TYPES.getName(), StringUtils.join(inputFeedStorageTypes, '#'));
    }

    /**
     * Builds a SYNCDATASET for the given feed/location on this cluster.
     *
     * @return the dataset, or {@code null} when the storage has no uri template for
     *         {@code locationType} (callers treat this as "skip")
     */
    private SYNCDATASET createDataSet(Feed feed, Cluster cluster, Storage storage,
        String datasetName, LocationType locationType) throws FalconException {

        SYNCDATASET syncdataset = new SYNCDATASET();
        syncdataset.setName(datasetName);
        syncdataset.setFrequency("${coord:" + feed.getFrequency().toString() + "}");

        String uriTemplate = storage.getUriTemplate(locationType);
        if (uriTemplate == null) {
            return null;
        }
        if (storage.getType() == Storage.TYPE.TABLE) {
            uriTemplate = uriTemplate.replace("thrift", "hcat"); // Oozie requires this!!!
        }
        syncdataset.setUriTemplate(uriTemplate);

        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
        syncdataset.setInitialInstance(SchemaHelper.formatDateUTC(feedCluster.getValidity().getStart()));
        syncdataset.setTimezone(feed.getTimezone().getID());

        // Empty done-flag tells Oozie the directory's existence itself signals availability.
        if (feed.getAvailabilityFlag() == null) {
            syncdataset.setDoneFlag("");
        } else {
            syncdataset.setDoneFlag(feed.getAvailabilityFlag());
        }

        return syncdataset;
    }

    /**
     * Builds the DATAIN event for an input: dataset name mirrors the input name, with
     * start/end instances as EL expressions.
     */
    private DATAIN createDataIn(Input input) {
        DATAIN datain = new DATAIN();
        datain.setName(input.getName());
        datain.setDataset(input.getName());
        datain.setStartInstance(getELExpression(input.getStart()));
        datain.setEndInstance(getELExpression(input.getEnd()));
        return datain;
    }

    /**
     * Wraps a non-null expression as an Oozie EL expression: {@code "${expr}"}.
     */
    private String getELExpression(String expr) {
        if (expr != null) {
            expr = "${" + expr + "}";
        }
        return expr;
    }

    /**
     * Registers each process output as a coordinator dataset + data-out event and
     * records per-output EL expressions and feed metadata into {@code props}.
     *
     * <p>NOTE(review): mirrors {@code initializeInputPaths} — a null dataset from
     * {@code createDataSet} causes an early return that skips remaining outputs and the
     * aggregate OUTPUT_* properties. Verify this is intended.</p>
     */
    private void initializeOutputPaths(Cluster cluster, COORDINATORAPP coord, Properties props)
        throws FalconException {
        if (entity.getOutputs() == null) {
            props.put(WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), NONE);
            props.put(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), NONE);
            props.put(WorkflowExecutionArgs.OUTPUT_NAMES.getName(), NONE);
            return;
        }

        if (coord.getDatasets() == null) {
            coord.setDatasets(new DATASETS());
        }

        if (coord.getOutputEvents() == null) {
            coord.setOutputEvents(new OUTPUTEVENTS());
        }

        List<String> outputFeeds = new LinkedList<>();
        List<String> outputPaths = new LinkedList<>();
        List<String> falconOutputNames = new LinkedList<>();
        for (Output output : entity.getOutputs().getOutputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, output.getFeed());
            Storage storage = FeedHelper.createStorage(cluster, feed);

            SYNCDATASET syncdataset = createDataSet(feed, cluster, storage, output.getName(), LocationType.DATA);
            if (syncdataset == null) {
                return;
            }
            coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);

            DATAOUT dataout = createDataOut(output);
            coord.getOutputEvents().getDataOut().add(dataout);

            String outputExpr = "${coord:dataOut('" + output.getName() + "')}";
            outputFeeds.add(feed.getName());
            falconOutputNames.add(output.getName());
            outputPaths.add(outputExpr);

            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
                props.put(output.getName(), outputExpr);
                // File-system feeds also get stats/meta/tmp side-output datasets.
                propagateFileSystemProperties(output, feed, cluster, coord, storage, props);
            } else if (storage.getType() == Storage.TYPE.TABLE) {
                propagateCatalogTableProperties(output, (CatalogStorage) storage, props);
            }
        }

        // Output feed name and path for parent workflow
        props.put(WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), StringUtils.join(outputFeeds, ','));
        props.put(WorkflowExecutionArgs.OUTPUT_NAMES.getName(), StringUtils.join(falconOutputNames, ','));
        props.put(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), StringUtils.join(outputPaths, ','));
    }

    /**
     * Builds the DATAOUT event for an output: dataset name mirrors the output name,
     * with the instance as an EL expression.
     */
    private DATAOUT createDataOut(Output output) {
        DATAOUT dataout = new DATAOUT();
        dataout.setName(output.getName());
        dataout.setDataset(output.getName());
        dataout.setInstance(getELExpression(output.getInstance()));
        return dataout;
    }

    /**
     * Adds stats/meta/tmp output events for a file-system-backed output feed.
     */
    private void propagateFileSystemProperties(Output output, Feed feed, Cluster cluster, COORDINATORAPP coord,
        Storage storage, Properties props) throws FalconException {

        // stats and meta paths
        createOutputEvent(output, feed, cluster, LocationType.STATS, coord, props, storage);
        createOutputEvent(output, feed, cluster, LocationType.META, coord, props, storage);
        createOutputEvent(output, feed, cluster, LocationType.TMP, coord, props, storage);
    }

    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
    /**
     * Registers a side-output dataset/event named {@code <output><loctype>} for the given
     * location type (stats/meta/tmp) and records its dataOut EL expression under the
     * property key {@code <output>.<loctype>}. Silently skips when the storage has no
     * uri template for the location.
     */
    private void createOutputEvent(Output output, Feed feed, Cluster cluster, LocationType locType,
        COORDINATORAPP coord, Properties props, Storage storage) throws FalconException {
        String name = output.getName();
        String type = locType.name().toLowerCase();

        SYNCDATASET dataset = createDataSet(feed, cluster, storage, name + type, locType);
        if (dataset == null) {
            return;
        }
        coord.getDatasets().getDatasetOrAsyncDataset().add(dataset);

        DATAOUT dataout = new DATAOUT();
        dataout.setName(name + type);
        dataout.setDataset(name + type);
        dataout.setInstance(getELExpression(output.getInstance()));

        OUTPUTEVENTS outputEvents = coord.getOutputEvents();
        if (outputEvents == null) {
            outputEvents = new OUTPUTEVENTS();
            coord.setOutputEvents(outputEvents);
        }
        outputEvents.getDataOut().add(dataout);

        String outputExpr = "${coord:dataOut('" + name + type + "')}";
        props.put(name + "." + type, outputExpr);
    }
    //RESUME CHECKSTYLE CHECK ParameterNumberCheck

    /**
     * Publishes HCatalog partition-filter EL expressions (pig/hive/java variants) for a
     * table-backed input under the {@code falcon_<input>} property prefix.
     */
    protected void propagateCatalogTableProperties(Input input, CatalogStorage tableStorage, Properties props) {
        String prefix = "falcon_" + input.getName();

        propagateCommonCatalogTableProperties(tableStorage, props, prefix);

        props.put(prefix + "_partition_filter_pig",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'pig')}");
        props.put(prefix + "_partition_filter_hive",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'hive')}");
        props.put(prefix + "_partition_filter_java",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'java')}");
    }
}